Dec 02 11:51:09 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 02 11:51:09 crc restorecon[4688]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 11:51:09 crc restorecon[4688]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 11:51:09 crc 
restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 11:51:09 crc 
restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc 
restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc 
restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 11:51:09 
crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 
11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 
11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 11:51:09 crc 
restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 
11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 11:51:10 crc restorecon[4688]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 11:51:10 crc restorecon[4688]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
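
The restorecon pass ends here: every path whose context an admin customized (the container_file_t entries with per-pod category pairs such as s0:c7,c13) is left alone, and only /var/usrlocal/bin/kubenswrapper is actually relabeled. Below is a minimal Go sketch for reading the same security.selinux attribute those records quote. It is illustrative only, not restorecon's implementation; it assumes Linux and the golang.org/x/sys/unix module, and the paths to inspect are passed on the command line.

// selinuxctx.go - print the SELinux context of each argument, as stored in
// the "security.selinux" extended attribute.
package main

import (
	"fmt"
	"os"

	"golang.org/x/sys/unix"
)

// contextOf returns a context string such as
// "system_u:object_r:container_file_t:s0:c7,c13".
func contextOf(path string) (string, error) {
	buf := make([]byte, 256)
	n, err := unix.Getxattr(path, "security.selinux", buf)
	if err != nil {
		return "", err
	}
	// The stored value is NUL-terminated; strip the terminator.
	for n > 0 && buf[n-1] == 0 {
		n--
	}
	return string(buf[:n]), nil
}

func main() {
	for _, p := range os.Args[1:] {
		ctx, err := contextOf(p)
		if err != nil {
			fmt.Fprintf(os.Stderr, "%s: %v\n", p, err)
			continue
		}
		fmt.Printf("%s\t%s\n", p, ctx)
	}
}

Run on the node itself (for example, go run selinuxctx.go /var/lib/kubelet/plugins_registry), it should print system_u:object_r:container_file_t:s0 for the path quoted in the record above.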
Dec 02 11:51:10 crc kubenswrapper[4721]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 02 11:51:10 crc kubenswrapper[4721]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Dec 02 11:51:10 crc kubenswrapper[4721]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 02 11:51:10 crc kubenswrapper[4721]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 02 11:51:10 crc kubenswrapper[4721]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 02 11:51:10 crc kubenswrapper[4721]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.664840 4721 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
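
Most of the deprecation warnings above say the same thing: move the flag into the file passed to --config. The sketch below is illustrative, not taken from this cluster's configuration: a stdlib-only Go program that renders a KubeletConfiguration fragment covering those flags, emitted as JSON (the kubelet's config loader accepts JSON as well as YAML). The field names follow kubelet.config.k8s.io/v1beta1 as of recent Kubernetes releases and should be verified against the reference for the version in use; every value is a placeholder.

// kubeletconfig.go - emit a KubeletConfiguration fragment that replaces the
// deprecated kubelet flags warned about above. All values are placeholders.
package main

import (
	"encoding/json"
	"fmt"
	"os"
)

type taint struct {
	Key    string `json:"key"`
	Value  string `json:"value,omitempty"`
	Effect string `json:"effect"`
}

type kubeletConfiguration struct {
	Kind                     string            `json:"kind"`
	APIVersion               string            `json:"apiVersion"`
	ContainerRuntimeEndpoint string            `json:"containerRuntimeEndpoint"` // replaces --container-runtime-endpoint
	VolumePluginDir          string            `json:"volumePluginDir"`          // replaces --volume-plugin-dir
	RegisterWithTaints       []taint           `json:"registerWithTaints"`       // replaces --register-with-taints
	SystemReserved           map[string]string `json:"systemReserved"`           // replaces --system-reserved
	EvictionHard             map[string]string `json:"evictionHard"`             // eviction thresholds supersede --minimum-container-ttl-duration
}

func main() {
	cfg := kubeletConfiguration{
		Kind:                     "KubeletConfiguration",
		APIVersion:               "kubelet.config.k8s.io/v1beta1",
		ContainerRuntimeEndpoint: "unix:///var/run/crio/crio.sock",
		VolumePluginDir:          "/etc/kubernetes/kubelet-plugins/volume/exec",
		RegisterWithTaints:       []taint{{Key: "node-role.kubernetes.io/master", Effect: "NoSchedule"}},
		SystemReserved:           map[string]string{"cpu": "500m", "memory": "1Gi"},
		EvictionHard:             map[string]string{"memory.available": "100Mi"},
	}
	out, err := json.MarshalIndent(cfg, "", "  ")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(string(out))
}

Note that --pod-infra-container-image has no config-file replacement: as the I1202 record above says, the image garbage collector gets the sandbox image from the CRI runtime instead.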
Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669217 4721 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669223 4721 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669229 4721 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669236 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669242 4721 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669248 4721 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669255 4721 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669260 4721 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669265 4721 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669271 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669278 4721 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669284 4721 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669289 4721 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669295 4721 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669301 4721 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
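[Annotation] Each "unrecognized feature gate" warning in this run (it continues below, and the whole list is re-parsed several times during startup, which is why the block repeats almost verbatim) names an OpenShift cluster-level gate, such as OnClusterBuild or GatewayAPI, that the embedded upstream kubelet does not register. These appear to be warned about and skipped; the gates the kubelet does recognize are folded into the effective map printed at feature_gate.go:386. A quick way to pull just that map out of the noise, assuming the journald unit is kubelet.service:

    # Print only the effective feature-gate maps, skipping the
    # unrecognized-gate warnings.
    journalctl -u kubelet.service --no-pager | grep -F 'feature gates: {map['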
Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669308 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669313 4721 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669318 4721 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669323 4721 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669329 4721 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669334 4721 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669358 4721 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669363 4721 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669368 4721 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669373 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669379 4721 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669384 4721 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669389 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669394 4721 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669399 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669404 4721 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669410 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669415 4721 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669420 4721 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669425 4721 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669430 4721 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669437 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669442 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669447 4721 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669452 4721 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669457 4721 feature_gate.go:330] unrecognized feature gate: 
MultiArchInstallAzure Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669462 4721 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669467 4721 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669472 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669477 4721 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669482 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669487 4721 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669493 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669498 4721 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669503 4721 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.669508 4721 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669828 4721 flags.go:64] FLAG: --address="0.0.0.0" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669844 4721 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669855 4721 flags.go:64] FLAG: --anonymous-auth="true" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669862 4721 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669869 4721 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669875 4721 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669883 4721 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669891 4721 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669897 4721 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669902 4721 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669909 4721 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669915 4721 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669922 4721 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669928 4721 flags.go:64] FLAG: --cgroup-root="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669934 4721 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669940 4721 flags.go:64] FLAG: --client-ca-file="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669946 4721 flags.go:64] FLAG: --cloud-config="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 
11:51:10.669952 4721 flags.go:64] FLAG: --cloud-provider="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669958 4721 flags.go:64] FLAG: --cluster-dns="[]" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669965 4721 flags.go:64] FLAG: --cluster-domain="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669971 4721 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669977 4721 flags.go:64] FLAG: --config-dir="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669983 4721 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.669989 4721 flags.go:64] FLAG: --container-log-max-files="5" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670008 4721 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670014 4721 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670020 4721 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670027 4721 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670033 4721 flags.go:64] FLAG: --contention-profiling="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670039 4721 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670044 4721 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670051 4721 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670058 4721 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670065 4721 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670071 4721 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670077 4721 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670083 4721 flags.go:64] FLAG: --enable-load-reader="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670089 4721 flags.go:64] FLAG: --enable-server="true" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670095 4721 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670103 4721 flags.go:64] FLAG: --event-burst="100" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670109 4721 flags.go:64] FLAG: --event-qps="50" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670115 4721 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670123 4721 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670130 4721 flags.go:64] FLAG: --eviction-hard="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670137 4721 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670143 4721 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670149 4721 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 02 11:51:10 crc 
kubenswrapper[4721]: I1202 11:51:10.670156 4721 flags.go:64] FLAG: --eviction-soft="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670164 4721 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670170 4721 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670176 4721 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670182 4721 flags.go:64] FLAG: --experimental-mounter-path="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670188 4721 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670193 4721 flags.go:64] FLAG: --fail-swap-on="true" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670199 4721 flags.go:64] FLAG: --feature-gates="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670206 4721 flags.go:64] FLAG: --file-check-frequency="20s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670212 4721 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670218 4721 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670225 4721 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670230 4721 flags.go:64] FLAG: --healthz-port="10248" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670236 4721 flags.go:64] FLAG: --help="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670242 4721 flags.go:64] FLAG: --hostname-override="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670248 4721 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670255 4721 flags.go:64] FLAG: --http-check-frequency="20s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670261 4721 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670266 4721 flags.go:64] FLAG: --image-credential-provider-config="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670272 4721 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670278 4721 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670285 4721 flags.go:64] FLAG: --image-service-endpoint="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670292 4721 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670298 4721 flags.go:64] FLAG: --kube-api-burst="100" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670304 4721 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670310 4721 flags.go:64] FLAG: --kube-api-qps="50" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670317 4721 flags.go:64] FLAG: --kube-reserved="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670323 4721 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670329 4721 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670335 4721 flags.go:64] FLAG: --kubelet-cgroups="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 
11:51:10.670360 4721 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670366 4721 flags.go:64] FLAG: --lock-file="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670373 4721 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670379 4721 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670385 4721 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670394 4721 flags.go:64] FLAG: --log-json-split-stream="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670399 4721 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670405 4721 flags.go:64] FLAG: --log-text-split-stream="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670411 4721 flags.go:64] FLAG: --logging-format="text" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670417 4721 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670423 4721 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670429 4721 flags.go:64] FLAG: --manifest-url="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670435 4721 flags.go:64] FLAG: --manifest-url-header="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670443 4721 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670449 4721 flags.go:64] FLAG: --max-open-files="1000000" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670456 4721 flags.go:64] FLAG: --max-pods="110" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670462 4721 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670468 4721 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670474 4721 flags.go:64] FLAG: --memory-manager-policy="None" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670479 4721 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670485 4721 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670491 4721 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670497 4721 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670511 4721 flags.go:64] FLAG: --node-status-max-images="50" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670517 4721 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670523 4721 flags.go:64] FLAG: --oom-score-adj="-999" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670529 4721 flags.go:64] FLAG: --pod-cidr="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670552 4721 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670562 4721 flags.go:64] FLAG: 
--pod-manifest-path="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670568 4721 flags.go:64] FLAG: --pod-max-pids="-1" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670576 4721 flags.go:64] FLAG: --pods-per-core="0" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670582 4721 flags.go:64] FLAG: --port="10250" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670588 4721 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670595 4721 flags.go:64] FLAG: --provider-id="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670600 4721 flags.go:64] FLAG: --qos-reserved="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670607 4721 flags.go:64] FLAG: --read-only-port="10255" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670614 4721 flags.go:64] FLAG: --register-node="true" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670620 4721 flags.go:64] FLAG: --register-schedulable="true" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670626 4721 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670636 4721 flags.go:64] FLAG: --registry-burst="10" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670642 4721 flags.go:64] FLAG: --registry-qps="5" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670648 4721 flags.go:64] FLAG: --reserved-cpus="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670654 4721 flags.go:64] FLAG: --reserved-memory="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670664 4721 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670671 4721 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670677 4721 flags.go:64] FLAG: --rotate-certificates="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670682 4721 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670688 4721 flags.go:64] FLAG: --runonce="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670694 4721 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670700 4721 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670706 4721 flags.go:64] FLAG: --seccomp-default="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670712 4721 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670718 4721 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670723 4721 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670730 4721 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670735 4721 flags.go:64] FLAG: --storage-driver-password="root" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670741 4721 flags.go:64] FLAG: --storage-driver-secure="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670747 4721 flags.go:64] FLAG: --storage-driver-table="stats" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670753 4721 flags.go:64] FLAG: --storage-driver-user="root" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 
11:51:10.670759 4721 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670765 4721 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670771 4721 flags.go:64] FLAG: --system-cgroups="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670777 4721 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670792 4721 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670798 4721 flags.go:64] FLAG: --tls-cert-file="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670804 4721 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670810 4721 flags.go:64] FLAG: --tls-min-version="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670817 4721 flags.go:64] FLAG: --tls-private-key-file="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670823 4721 flags.go:64] FLAG: --topology-manager-policy="none" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670829 4721 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670834 4721 flags.go:64] FLAG: --topology-manager-scope="container" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670840 4721 flags.go:64] FLAG: --v="2" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670848 4721 flags.go:64] FLAG: --version="false" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670855 4721 flags.go:64] FLAG: --vmodule="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670863 4721 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.670869 4721 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671015 4721 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671021 4721 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671027 4721 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671032 4721 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671037 4721 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671043 4721 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671048 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671053 4721 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671058 4721 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671064 4721 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671069 4721 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671074 
4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671080 4721 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671085 4721 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671091 4721 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671096 4721 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671101 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671106 4721 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671111 4721 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671116 4721 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671121 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671126 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671131 4721 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671141 4721 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671147 4721 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671153 4721 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671159 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671164 4721 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671169 4721 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671174 4721 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671179 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671184 4721 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671190 4721 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671195 4721 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671200 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671205 4721 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671212 4721 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
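[Annotation] For orientation: the long flags.go:64 "FLAG:" block above is the kubelet echoing every command-line flag at verbosity -v=2 before the --config file is merged in, so values such as --cgroup-driver="cgroupfs" are pre-merge defaults rather than what ends up in force (the CRI later reports systemd; see below). The merged configuration can be read back from the node's configz endpoint; a sketch, assuming a kubeconfig with access to the node proxy subresource ("crc" is this node's name):

    # Fetch the kubelet's effective merged configuration via the API server.
    kubectl get --raw "/api/v1/nodes/crc/proxy/configz" | jq .kubeletconfig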
Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671219 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671225 4721 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671232 4721 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671237 4721 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671243 4721 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671249 4721 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671254 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671259 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671265 4721 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671271 4721 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671278 4721 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671283 4721 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671288 4721 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671293 4721 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671300 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671305 4721 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671310 4721 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671315 4721 feature_gate.go:330] unrecognized feature gate: Example Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671322 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671327 4721 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671333 4721 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671358 4721 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671365 4721 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671370 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671375 4721 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671380 4721 feature_gate.go:330] 
unrecognized feature gate: InsightsOnDemandDataGather Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671387 4721 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671393 4721 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671399 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671404 4721 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671409 4721 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671415 4721 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671422 4721 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.671428 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.671436 4721 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.681569 4721 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.681635 4721 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681803 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681827 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681836 4721 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681846 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681855 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681864 4721 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681873 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681881 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681889 4721 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681897 4721 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681908 4721 feature_gate.go:353] Setting GA feature 
gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681921 4721 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681931 4721 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681944 4721 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681956 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681965 4721 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681973 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681982 4721 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.681992 4721 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682001 4721 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682009 4721 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682019 4721 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682028 4721 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682037 4721 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682046 4721 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682055 4721 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682063 4721 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682070 4721 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682078 4721 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682087 4721 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682095 4721 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682103 4721 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682110 4721 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682119 4721 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682129 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682136 4721 feature_gate.go:330] unrecognized 
feature gate: PinnedImages Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682144 4721 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682152 4721 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682161 4721 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682170 4721 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682179 4721 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682188 4721 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682196 4721 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682205 4721 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682213 4721 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682221 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682229 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682237 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682245 4721 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682253 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682260 4721 feature_gate.go:330] unrecognized feature gate: Example Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682268 4721 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682276 4721 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682283 4721 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682290 4721 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682298 4721 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682306 4721 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682313 4721 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682321 4721 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682328 4721 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682337 4721 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682373 4721 feature_gate.go:330] 
unrecognized feature gate: HardwareSpeed Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682381 4721 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682392 4721 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682401 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682412 4721 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682421 4721 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682430 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682438 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682447 4721 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682456 4721 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.682470 4721 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682733 4721 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682749 4721 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682757 4721 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682766 4721 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682775 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682785 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682794 4721 feature_gate.go:330] unrecognized feature gate: Example Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682802 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682811 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682819 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682827 4721 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682834 4721 feature_gate.go:330] unrecognized feature gate: 
SignatureStores Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682842 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682850 4721 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682858 4721 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682866 4721 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682873 4721 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682881 4721 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682889 4721 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682896 4721 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682904 4721 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682911 4721 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682919 4721 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682927 4721 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682935 4721 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682946 4721 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682954 4721 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682962 4721 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682969 4721 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682977 4721 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682985 4721 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.682992 4721 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683000 4721 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683007 4721 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683017 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683025 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683032 4721 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683040 4721 feature_gate.go:330] unrecognized feature 
gate: VSphereMultiVCenters Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683047 4721 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683055 4721 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683063 4721 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683071 4721 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683078 4721 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683086 4721 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683093 4721 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683102 4721 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683109 4721 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683117 4721 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683124 4721 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683132 4721 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683140 4721 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683148 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683158 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683166 4721 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683174 4721 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683185 4721 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683196 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683206 4721 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683214 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683223 4721 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683233 4721 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683243 4721 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683252 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683261 4721 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683270 4721 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683279 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683287 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683295 4721 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683302 4721 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683312 4721 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.683323 4721 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.683337 4721 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.683689 4721 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.688939 4721 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.689115 4721 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
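[Annotation] Client certificate rotation is on and the existing kubeconfig is still valid, so the kubelet loads its current client pair from kubelet-client-current.pem. The rotation attempt logged just below then fails with "connection refused" against api-int.crc.testing:6443, which is expected this early in boot (the API server is not up yet); the certificate manager keeps retrying. One way to inspect the pair it loaded, assuming openssl is available on the host:

    # Show subject and validity of the kubelet's current client certificate;
    # the .pem bundles cert and key, openssl prints the certificate part.
    openssl x509 -in /var/lib/kubelet/pki/kubelet-client-current.pem -noout -subject -dates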
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.689995 4721 server.go:997] "Starting client certificate rotation"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.690046 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.690386 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-19 07:56:01.243805752 +0000 UTC
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.690503 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.699948 4721 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 02 11:51:10 crc kubenswrapper[4721]: E1202 11:51:10.701673 4721 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.169:6443: connect: connection refused" logger="UnhandledError"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.706420 4721 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.717365 4721 log.go:25] "Validated CRI v1 runtime API"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.742855 4721 log.go:25] "Validated CRI v1 image API"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.744796 4721 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.747790 4721 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-02-11-47-21-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.747825 4721 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.761942 4721 manager.go:217] Machine: {Timestamp:2025-12-02 11:51:10.759711471 +0000 UTC m=+0.498261840 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:a9e321cc-1477-4a2a-bfd1-fa3938fe51c8 BootID:09bbbf15-d1fe-4d39-8a50-9880fa13aa25 Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:09:07:1d Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:09:07:1d Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:f4:83:b6 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:46:0b:7c Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:be:20:b8 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:3f:49:3e Speed:-1 Mtu:1496} {Name:eth10 MacAddress:06:a0:39:22:f8:ad Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:6e:83:a7:39:a3:b4 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.762184 4721 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.762329 4721 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.762853 4721 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.763013 4721 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.763053 4721 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.763295 4721 topology_manager.go:138] "Creating topology manager with none policy"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.763305 4721 container_manager_linux.go:303] "Creating device plugin manager"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.763432 4721 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.763459 4721 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.763739 4721 state_mem.go:36] "Initialized new in-memory state store"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.763941 4721 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.764560 4721 kubelet.go:418] "Attempting to sync node with API server"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.764582 4721 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.764626 4721 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.764639 4721 kubelet.go:324] "Adding apiserver pod source"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.764651 4721 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.767114 4721 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.767487 4721 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.768777 4721 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.769337 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.769475 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.769484 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.769491 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.769504 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.769533 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.769542 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.769554 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.769565 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.769574 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.769610 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.769620 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.770636 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.169:6443: connect: connection refused
Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.770642 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.169:6443: connect: connection refused
Dec 02 11:51:10 crc kubenswrapper[4721]: E1202 11:51:10.770767 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.169:6443: connect: connection refused" logger="UnhandledError"
Dec 02 11:51:10 crc kubenswrapper[4721]: E1202 11:51:10.770778 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.169:6443: connect: connection refused" logger="UnhandledError"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.771333 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.771820 4721 server.go:1280] "Started kubelet"
Dec 02 11:51:10 crc systemd[1]: Started Kubernetes Kubelet.
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.777182 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.169:6443: connect: connection refused
Dec 02 11:51:10 crc kubenswrapper[4721]: E1202 11:51:10.777660 4721 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.169:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187d63b7f6e9e765 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 11:51:10.771791717 +0000 UTC m=+0.510342066,LastTimestamp:2025-12-02 11:51:10.771791717 +0000 UTC m=+0.510342066,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.778968 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.779035 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 04:17:50.291903932 +0000 UTC
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.779112 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 304h26m39.51282016s for next certificate rotation
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.779420 4721 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.779438 4721 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.779419 4721 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.784556 4721 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.784601 4721 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 02 11:51:10 crc kubenswrapper[4721]: E1202 11:51:10.787018 4721 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.787094 4721 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.787982 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.169:6443: connect: connection refused
Dec 02 11:51:10 crc kubenswrapper[4721]: E1202 11:51:10.788022 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.169:6443: connect: connection refused" interval="200ms"
Dec 02 11:51:10 crc kubenswrapper[4721]: E1202 11:51:10.788088 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.169:6443: connect: connection refused" logger="UnhandledError"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.789085 4721 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.789463 4721 server.go:460] "Adding debug handlers to kubelet server"
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.791459 4721 factory.go:55] Registering systemd factory
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.791495 4721 factory.go:221] Registration of the systemd container factory successfully
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.792361 4721 factory.go:153] Registering CRI-O factory
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.792386 4721 factory.go:221] Registration of the crio container factory successfully
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.792458 4721 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.792601 4721 factory.go:103] Registering Raw factory
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.792963 4721 manager.go:1196] Started watching for new ooms in manager
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.794299 4721 manager.go:319] Starting recovery of all containers
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.798939 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799002 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799025 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799043 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799060 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799082 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799099 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799116 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799137 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799156 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799173 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799191 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799207 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799227 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799243 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799262 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799282 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799299 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799316 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799335 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799389 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799410 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799435 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799455 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799472 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799492 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799514 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799533 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799552 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799570 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799586 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799603 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799621 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799638 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799656 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799683 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799700 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799716 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799734 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799753 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799771 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799792 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799810 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799845 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799866 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799885 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799904 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799923 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799939 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799957 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799976 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.799994 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800020 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800041 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800059 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800079 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800098 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800115 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800134 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800150 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800168 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800186 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800204 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800301 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800335 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800382 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800400 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800418 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800435 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800454 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800471 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800487 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800507 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800524 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800544 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800560 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800576 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800595 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800611 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800628 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800647 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800667 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800685 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800703 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800723 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800741 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800761 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800780 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800798 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800818 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800839 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800859 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800878 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800898 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800915 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800934 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800953 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800973 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.800991 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801009 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801029 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801048 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801066 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801083 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801110 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801132 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801152 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801172 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801192 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801212 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801232 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801250 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801270 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801290 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801308 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801327 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801491 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801516 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801534 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801552 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801570 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801591 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801608 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801625 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801645 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801663 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801681 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801698 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801716 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801734 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801751 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801774 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801802 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801821 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801839 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801856 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801872 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801889 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod=""
podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801905 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801922 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801942 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801961 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801978 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.801997 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802017 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802034 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802051 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802069 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802088 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802105 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802122 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802139 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802158 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802176 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802194 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802211 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802226 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802245 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802262 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802281 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802299 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802316 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802335 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802383 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.802401 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804476 4721 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804534 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804582 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804600 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804629 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804646 4721 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804666 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804696 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804712 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804726 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804754 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804771 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804795 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804809 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804824 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804846 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804860 4721 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804882 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804900 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804919 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804949 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.804972 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805000 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805017 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805031 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805049 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805064 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805085 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805098 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805112 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805134 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805150 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805172 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805187 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805205 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805225 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805241 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805254 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805276 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805289 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805309 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.805323 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.806441 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.806475 4721 reconstruct.go:97] "Volume reconstruction finished" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.806484 4721 reconciler.go:26] "Reconciler: start to sync state" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.825039 4721 manager.go:324] Recovery completed Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.835958 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.836455 4721 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.838407 4721 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.838452 4721 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.838495 4721 kubelet.go:2335] "Starting kubelet main sync loop" Dec 02 11:51:10 crc kubenswrapper[4721]: E1202 11:51:10.838546 4721 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.838898 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.838979 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.839001 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.840138 4721 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.840211 4721 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.840241 4721 state_mem.go:36] "Initialized new in-memory state store" Dec 02 11:51:10 crc kubenswrapper[4721]: W1202 11:51:10.840985 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.169:6443: connect: connection refused Dec 02 11:51:10 crc kubenswrapper[4721]: E1202 11:51:10.841067 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.169:6443: connect: connection refused" logger="UnhandledError" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.853339 4721 policy_none.go:49] "None policy: Start" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.854110 4721 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.854166 4721 state_mem.go:35] "Initializing new in-memory state store" Dec 02 11:51:10 crc kubenswrapper[4721]: E1202 11:51:10.888159 4721 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.914335 4721 manager.go:334] "Starting Device Plugin manager" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.914731 4721 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.914816 4721 server.go:79] "Starting device plugin registration server" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.915487 4721 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.915560 4721 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.915779 4721 plugin_watcher.go:51] "Plugin Watcher Start" 
path="/var/lib/kubelet/plugins_registry" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.915909 4721 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.915970 4721 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 02 11:51:10 crc kubenswrapper[4721]: E1202 11:51:10.923662 4721 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.938901 4721 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.939042 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.940481 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.940541 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.940599 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.940792 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.942408 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.942524 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.942667 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.942720 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.942774 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.942988 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.943135 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.943174 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.943938 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.944046 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.944068 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.943961 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.944137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.944155 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.944104 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.944235 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.944252 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.944322 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.944475 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.944524 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.945526 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.945569 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.945591 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.945541 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.945656 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.945669 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.945841 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.945868 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.945876 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.946839 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.946857 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.946873 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.946885 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.946898 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.946911 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.947135 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.947166 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.948554 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.948599 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:10 crc kubenswrapper[4721]: I1202 11:51:10.948613 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:10 crc kubenswrapper[4721]: E1202 11:51:10.988813 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.169:6443: connect: connection refused" interval="400ms" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.009489 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.009658 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.010065 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.010263 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.010421 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.010603 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.010790 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.010962 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.011127 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.011262 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.011307 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.011440 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.011474 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.011606 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.011684 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.015809 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.016717 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.016746 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.016757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.016802 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 11:51:11 crc kubenswrapper[4721]: E1202 11:51:11.017259 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.169:6443: connect: connection refused" node="crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113299 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113432 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113470 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 
11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113518 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113551 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113557 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113601 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113637 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113644 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113678 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113719 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113750 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113673 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113774 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113785 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113829 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113876 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113933 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.113977 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.114046 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.114064 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.114094 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.114133 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") " pod="openshift-etcd/etcd-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.114167 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.114171 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.114137 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.114227 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.114265 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.114553 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.114634 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.217474 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.219521 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.219586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.219610 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.219697 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: E1202 11:51:11.220298 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.169:6443: connect: connection refused" node="crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.270927 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.280198 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.303376 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.316028 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: W1202 11:51:11.317601 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-b083c00eaaa8da19a2bc3b8af08b798dbae0fa6d9a90a2821a00348a8493913a WatchSource:0}: Error finding container b083c00eaaa8da19a2bc3b8af08b798dbae0fa6d9a90a2821a00348a8493913a: Status 404 returned error can't find the container with id b083c00eaaa8da19a2bc3b8af08b798dbae0fa6d9a90a2821a00348a8493913a
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.320762 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: W1202 11:51:11.321543 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-9412a5d2264b845d587b40dfe51028d125dc5fd28563eccf418d50699c9d439b WatchSource:0}: Error finding container 9412a5d2264b845d587b40dfe51028d125dc5fd28563eccf418d50699c9d439b: Status 404 returned error can't find the container with id 9412a5d2264b845d587b40dfe51028d125dc5fd28563eccf418d50699c9d439b
Dec 02 11:51:11 crc kubenswrapper[4721]: W1202 11:51:11.335417 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-bab309984d613fb38f04b8ae78a77fd1301fd2d923df29903a75caed7c57020b WatchSource:0}: Error finding container bab309984d613fb38f04b8ae78a77fd1301fd2d923df29903a75caed7c57020b: Status 404 returned error can't find the container with id bab309984d613fb38f04b8ae78a77fd1301fd2d923df29903a75caed7c57020b
Dec 02 11:51:11 crc kubenswrapper[4721]: W1202 11:51:11.336988 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-41fcfccaf2fb2ccdf087e22a2d9ebb80e7c1a08b53c453951fbebec0a9cb995d WatchSource:0}: Error finding container 41fcfccaf2fb2ccdf087e22a2d9ebb80e7c1a08b53c453951fbebec0a9cb995d: Status 404 returned error can't find the container with id 41fcfccaf2fb2ccdf087e22a2d9ebb80e7c1a08b53c453951fbebec0a9cb995d
Dec 02 11:51:11 crc kubenswrapper[4721]: W1202 11:51:11.341275 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-14f6a435b2795575d492e5cc2e18dbae64713dd2f486999aae0364919406cb1b WatchSource:0}: Error finding container 14f6a435b2795575d492e5cc2e18dbae64713dd2f486999aae0364919406cb1b: Status 404 returned error can't find the container with id 14f6a435b2795575d492e5cc2e18dbae64713dd2f486999aae0364919406cb1b
Dec 02 11:51:11 crc kubenswrapper[4721]: E1202 11:51:11.390134 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.169:6443: connect: connection refused" interval="800ms"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.620776 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.622460 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.622508 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.622525 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.622560 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: E1202 11:51:11.623108 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.169:6443: connect: connection refused" node="crc"
Dec 02 11:51:11 crc kubenswrapper[4721]: W1202 11:51:11.764821 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.169:6443: connect: connection refused
Dec 02 11:51:11 crc kubenswrapper[4721]: E1202 11:51:11.765098 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.169:6443: connect: connection refused" logger="UnhandledError"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.778141 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.169:6443: connect: connection refused
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.845122 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5"}
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.845230 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"14f6a435b2795575d492e5cc2e18dbae64713dd2f486999aae0364919406cb1b"}
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.847535 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f" exitCode=0
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.847611 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f"}
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.847630 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"41fcfccaf2fb2ccdf087e22a2d9ebb80e7c1a08b53c453951fbebec0a9cb995d"}
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.847749 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.849563 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.849592 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.849602 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.850222 4721 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1" exitCode=0
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.850265 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1"}
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.850363 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"bab309984d613fb38f04b8ae78a77fd1301fd2d923df29903a75caed7c57020b"}
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.850555 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.851365 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.851959 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.851995 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.852011 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.852507 4721 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="8ea00657786746027504e48917155e9f6dc53c1d733db0a19c6f2cdbbb0310cd" exitCode=0
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.852626 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"8ea00657786746027504e48917155e9f6dc53c1d733db0a19c6f2cdbbb0310cd"}
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.852649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.852689 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.852706 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.852662 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"b083c00eaaa8da19a2bc3b8af08b798dbae0fa6d9a90a2821a00348a8493913a"}
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.852754 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.853745 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.853785 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.853796 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.856176 4721 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b" exitCode=0
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.856250 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b"}
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.856559 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9412a5d2264b845d587b40dfe51028d125dc5fd28563eccf418d50699c9d439b"}
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.856846 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.858761 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.858877 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:11 crc kubenswrapper[4721]: I1202 11:51:11.858978 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:12 crc kubenswrapper[4721]: W1202 11:51:12.005482 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.169:6443: connect: connection refused
Dec 02 11:51:12 crc kubenswrapper[4721]: E1202 11:51:12.005565 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.169:6443: connect: connection refused" logger="UnhandledError"
Dec 02 11:51:12 crc kubenswrapper[4721]: W1202 11:51:12.186061 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.169:6443: connect: connection refused
Dec 02 11:51:12 crc kubenswrapper[4721]: E1202 11:51:12.186218 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.169:6443: connect: connection refused" logger="UnhandledError"
Dec 02 11:51:12 crc kubenswrapper[4721]: E1202 11:51:12.191945 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.169:6443: connect: connection refused" interval="1.6s"
Dec 02 11:51:12 crc kubenswrapper[4721]: W1202 11:51:12.271495 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.169:6443: connect: connection refused
Dec 02 11:51:12 crc kubenswrapper[4721]: E1202 11:51:12.271625 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.169:6443: connect: connection refused" logger="UnhandledError"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.423600 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.425029 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.425070 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.425100 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.425126 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 02 11:51:12 crc kubenswrapper[4721]: E1202 11:51:12.425729 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.169:6443: connect: connection refused" node="crc"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.778212 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.169:6443: connect: connection refused
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.876517 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f"}
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.876581 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde"}
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.876593 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0"}
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.876604 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251"}
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.883861 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.884171 4721 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55" exitCode=0
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.884229 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55"}
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.884409 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:12 crc kubenswrapper[4721]: E1202 11:51:12.884905 4721 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.169:6443: connect: connection refused" logger="UnhandledError"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.885795 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.885825 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.885837 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.888502 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"68aedde4e421e308f6547d96dca1464131e81f2715bde922721b3122f5c41eef"}
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.888550 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.889254 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.889310 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.889322 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.893991 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9fb6fb086b81deae49d00a8a09742c1fec519578ec195133c817d72e91b5fdf7"}
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.894031 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"8b0e89a391ffe89140ff679974abf51939d2852bf0c8b707e199be40fc5fc1f8"}
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.894044 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1831ed7c5b757a786e658ca6a57ffa86815b41736b0d2aa32276d8311a9e8e60"}
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.894109 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.895124 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.895157 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.895167 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.901275 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61"}
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.901318 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207"}
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.901383 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887"}
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.901406 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.902180 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.902227 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:12 crc kubenswrapper[4721]: I1202 11:51:12.902240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.137073 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.909144 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40"}
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.909316 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.910994 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.911031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.911044 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.914176 4721 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b" exitCode=0
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.914288 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.914802 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b"}
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.914903 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.915261 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.915666 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.915319 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.915715 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.915735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.915874 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.915960 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.915989 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.916494 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.916525 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.916540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.918449 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.918480 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:13 crc kubenswrapper[4721]: I1202 11:51:13.918490 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.025883 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.027727 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.027835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.027861 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.027906 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.924129 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459"}
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.924561 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595"}
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.924592 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a"}
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.924614 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41"}
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.924200 4721 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.924704 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.924313 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.925991 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.926089 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.926103 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.926675 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.926704 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:14 crc kubenswrapper[4721]: I1202 11:51:14.926713 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:15 crc kubenswrapper[4721]: I1202 11:51:15.736573 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 11:51:15 crc kubenswrapper[4721]: I1202 11:51:15.935631 4721 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 02 11:51:15 crc kubenswrapper[4721]: I1202 11:51:15.935663 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253"}
Dec 02 11:51:15 crc kubenswrapper[4721]: I1202 11:51:15.935681 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:15 crc kubenswrapper[4721]: I1202 11:51:15.935909 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:15 crc kubenswrapper[4721]: I1202 11:51:15.936589 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:15 crc kubenswrapper[4721]: I1202 11:51:15.936616 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:15 crc kubenswrapper[4721]: I1202 11:51:15.936626 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:15 crc kubenswrapper[4721]: I1202 11:51:15.937329 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:15 crc kubenswrapper[4721]: I1202 11:51:15.937481 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:15 crc kubenswrapper[4721]: I1202 11:51:15.937496 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:16 crc kubenswrapper[4721]: I1202 11:51:16.217388 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 11:51:16 crc kubenswrapper[4721]: I1202 11:51:16.464369 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 02 11:51:16 crc kubenswrapper[4721]: I1202 11:51:16.464575 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:16 crc kubenswrapper[4721]: I1202 11:51:16.465781 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:16 crc kubenswrapper[4721]: I1202 11:51:16.465849 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:16 crc kubenswrapper[4721]: I1202 11:51:16.465868 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:16 crc kubenswrapper[4721]: I1202 11:51:16.938809 4721 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 02 11:51:16 crc kubenswrapper[4721]: I1202 11:51:16.938885 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:16 crc kubenswrapper[4721]: I1202 11:51:16.938947 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:16 crc kubenswrapper[4721]: I1202 11:51:16.940483 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:16 crc kubenswrapper[4721]: I1202 11:51:16.940523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:16 crc kubenswrapper[4721]: I1202 11:51:16.940523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:16 crc kubenswrapper[4721]: I1202 11:51:16.940535 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:16 crc kubenswrapper[4721]: I1202 11:51:16.940567 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:16 crc kubenswrapper[4721]: I1202 11:51:16.940727 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:17 crc kubenswrapper[4721]: I1202 11:51:17.213289 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 02 11:51:18 crc kubenswrapper[4721]: I1202 11:51:18.484450 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Dec 02 11:51:18 crc kubenswrapper[4721]: I1202 11:51:18.484616 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:18 crc kubenswrapper[4721]: I1202 11:51:18.485633 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:18 crc kubenswrapper[4721]: I1202 11:51:18.485672 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:18 crc kubenswrapper[4721]: I1202 11:51:18.485681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:18 crc kubenswrapper[4721]: I1202 11:51:18.933135 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 02 11:51:18 crc kubenswrapper[4721]: I1202 11:51:18.933314 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:18 crc kubenswrapper[4721]: I1202 11:51:18.934630 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:18 crc kubenswrapper[4721]: I1202 11:51:18.934671 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:18 crc kubenswrapper[4721]: I1202 11:51:18.934685 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:18 crc kubenswrapper[4721]: I1202 11:51:18.938215 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 02 11:51:18 crc kubenswrapper[4721]: I1202 11:51:18.942315 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:18 crc kubenswrapper[4721]: I1202 11:51:18.943189 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:18 crc kubenswrapper[4721]: I1202 11:51:18.943260 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:18 crc kubenswrapper[4721]: I1202 11:51:18.943286 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:19 crc kubenswrapper[4721]: I1202 11:51:19.540663 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 02 11:51:19 crc kubenswrapper[4721]: I1202 11:51:19.944889 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:19 crc kubenswrapper[4721]: I1202 11:51:19.945813 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:19 crc kubenswrapper[4721]: I1202 11:51:19.945847 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:19 crc kubenswrapper[4721]: I1202 11:51:19.945859 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:20 crc kubenswrapper[4721]: I1202 11:51:20.622540 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 11:51:20 crc kubenswrapper[4721]: I1202 11:51:20.622754 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:20 crc kubenswrapper[4721]: I1202 11:51:20.624048 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:20 crc kubenswrapper[4721]: I1202 11:51:20.624104 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:20 crc kubenswrapper[4721]: I1202 11:51:20.624127 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:20 crc kubenswrapper[4721]: E1202 11:51:20.923807 4721 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Dec 02 11:51:21 crc kubenswrapper[4721]: I1202 11:51:21.647273 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 02 11:51:21 crc kubenswrapper[4721]: I1202 11:51:21.647457 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:21 crc kubenswrapper[4721]: I1202 11:51:21.648666 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:21 crc kubenswrapper[4721]: I1202 11:51:21.648708 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:21 crc kubenswrapper[4721]: I1202 11:51:21.648716 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:21 crc kubenswrapper[4721]: I1202 11:51:21.654486 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 02 11:51:21 crc kubenswrapper[4721]: I1202 11:51:21.949556 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:21 crc kubenswrapper[4721]: I1202 11:51:21.950563 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:21 crc kubenswrapper[4721]: I1202 11:51:21.950634 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:21 crc kubenswrapper[4721]: I1202 11:51:21.950659 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:22 crc kubenswrapper[4721]: I1202 11:51:22.463526 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Dec 02 11:51:22 crc kubenswrapper[4721]: I1202 11:51:22.463833 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:22 crc kubenswrapper[4721]: I1202 11:51:22.465431 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:22 crc kubenswrapper[4721]: I1202 11:51:22.465493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:22 crc kubenswrapper[4721]: I1202 11:51:22.465513 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:23 crc kubenswrapper[4721]: E1202 11:51:23.566500 4721 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": net/http: TLS handshake timeout" event="&Event{ObjectMeta:{crc.187d63b7f6e9e765 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 11:51:10.771791717 +0000 UTC m=+0.510342066,LastTimestamp:2025-12-02 11:51:10.771791717 +0000 UTC m=+0.510342066,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 02 11:51:23 crc kubenswrapper[4721]: W1202 11:51:23.765471 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Dec 02 11:51:23 crc kubenswrapper[4721]: I1202 11:51:23.765573 4721 trace.go:236] Trace[891002814]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 11:51:13.763) (total time: 10002ms):
Dec 02 11:51:23 crc kubenswrapper[4721]: Trace[891002814]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (11:51:23.765)
Dec 02 11:51:23 crc kubenswrapper[4721]: Trace[891002814]: [10.002015173s] [10.002015173s] END
Dec 02 11:51:23 crc kubenswrapper[4721]: E1202 11:51:23.765593 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Dec 02 11:51:23 crc kubenswrapper[4721]: I1202 11:51:23.779087 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Dec 02 11:51:23 crc kubenswrapper[4721]: E1202 11:51:23.793429 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s"
Dec 02 11:51:23 crc kubenswrapper[4721]: I1202 11:51:23.932123 4721 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 02 11:51:23 crc kubenswrapper[4721]: I1202 11:51:23.932195 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 02 11:51:23 crc kubenswrapper[4721]: I1202 11:51:23.945676 4721 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 02 11:51:23 crc kubenswrapper[4721]: I1202 11:51:23.945737 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 02 11:51:24 crc kubenswrapper[4721]: I1202 11:51:24.647649 4721 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 02 11:51:24 crc kubenswrapper[4721]: I1202 11:51:24.647740 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 02 11:51:26 crc kubenswrapper[4721]: I1202 11:51:26.225383 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 11:51:26 crc kubenswrapper[4721]: I1202 11:51:26.225646 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:26 crc kubenswrapper[4721]: I1202 11:51:26.226968 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:26 crc kubenswrapper[4721]: I1202 11:51:26.227027 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:26 crc kubenswrapper[4721]: I1202 11:51:26.227046 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:26 crc kubenswrapper[4721]: I1202 11:51:26.230660 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 11:51:26 crc kubenswrapper[4721]: I1202 11:51:26.963043 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 11:51:26 crc kubenswrapper[4721]: I1202 11:51:26.964182 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:26 crc kubenswrapper[4721]: I1202 11:51:26.964270 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:26 crc kubenswrapper[4721]: I1202 11:51:26.964298 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.930635 4721 trace.go:236] Trace[1404937844]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 11:51:13.926) (total time: 15003ms):
Dec 02 11:51:28 crc kubenswrapper[4721]: Trace[1404937844]: ---"Objects listed" error: 15003ms (11:51:28.930)
Dec 02 11:51:28 crc kubenswrapper[4721]: Trace[1404937844]: [15.003627786s] [15.003627786s] END
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.930665 4721 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.930811 4721 trace.go:236] Trace[547361766]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 11:51:14.077) (total time: 14852ms):
Dec 02 11:51:28 crc kubenswrapper[4721]: Trace[547361766]: ---"Objects listed" error: 14852ms (11:51:28.930)
Dec 02 11:51:28 crc kubenswrapper[4721]: Trace[547361766]: [14.85291411s] [14.85291411s] END
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.930832 4721 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.931847 4721 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.932461 4721 trace.go:236] Trace[954949956]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 11:51:14.435) (total time: 14496ms):
Dec 02 11:51:28 crc kubenswrapper[4721]: Trace[954949956]: ---"Objects listed" error: 14496ms (11:51:28.932)
Dec 02 11:51:28 crc kubenswrapper[4721]: Trace[954949956]: [14.496646176s] [14.496646176s] END
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.932484 4721 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.939777 4721 kubelet_node_status.go:115] "Node was previously registered" node="crc"
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.940018 4721 kubelet_node_status.go:79] "Successfully registered node" node="crc"
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.941034 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.941056 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.941066 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.941081 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.941090 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:28Z","lastTransitionTime":"2025-12-02T11:51:28Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"}
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.948106 4721 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Dec 02 11:51:28 crc kubenswrapper[4721]: E1202 11:51:28.963937 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:28Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.970090 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.970126 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.970136 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.970156 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.970170 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:28Z","lastTransitionTime":"2025-12-02T11:51:28Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"}
Dec 02 11:51:28 crc kubenswrapper[4721]: E1202 11:51:28.988277 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [status payload identical to the 11:51:28.963937 attempt above] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.994931 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.994971 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.994980 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.994998 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.995007 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:28Z","lastTransitionTime":"2025-12-02T11:51:28Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"}
Dec 02 11:51:28 crc kubenswrapper[4721]: I1202 11:51:28.999661 4721 csr.go:261] certificate signing request csr-mmm9z is approved, waiting to be issued
Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.004610 4721 csr.go:257] certificate signing request csr-mmm9z is issued
Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.012299 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [status payload identical to the 11:51:28.963937 attempt above, with condition timestamps 2025-12-02T11:51:29Z] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.017373 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.017413 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.017424 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.017441 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.017453 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:29Z","lastTransitionTime":"2025-12-02T11:51:29Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"}
Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.025845 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [status payload identical to the 11:51:28.963937 attempt above, with condition timestamps 2025-12-02T11:51:29Z] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.028887 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.028925 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.028934 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.028953 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.028969 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:29Z","lastTransitionTime":"2025-12-02T11:51:29Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"}
Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.037392 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.037506 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.038959 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 
11:51:29.038994 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.039003 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.039021 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.039030 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:29Z","lastTransitionTime":"2025-12-02T11:51:29Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.042005 4721 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:33718->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.042053 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:33718->192.168.126.11:17697: read: connection reset by peer" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.042231 4721 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:33722->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.042279 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:33722->192.168.126.11:17697: read: connection reset by peer" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.042585 4721 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.042644 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.141659 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:29 crc 
kubenswrapper[4721]: I1202 11:51:29.141694 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.141722 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.141740 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.141749 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:29Z","lastTransitionTime":"2025-12-02T11:51:29Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.244211 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.244256 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.244269 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.244287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.244298 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:29Z","lastTransitionTime":"2025-12-02T11:51:29Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.346866 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.346911 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.346922 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.346944 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.346956 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:29Z","lastTransitionTime":"2025-12-02T11:51:29Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.448795 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.448858 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.448871 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.448913 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.448928 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:29Z","lastTransitionTime":"2025-12-02T11:51:29Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.551059 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.551095 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.551103 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.551122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.551131 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:29Z","lastTransitionTime":"2025-12-02T11:51:29Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.652943 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.652985 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.652996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.653017 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.653026 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:29Z","lastTransitionTime":"2025-12-02T11:51:29Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.682600 4721 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.755324 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.755392 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.755406 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.755430 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.755443 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:29Z","lastTransitionTime":"2025-12-02T11:51:29Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.777645 4721 apiserver.go:52] "Watching apiserver" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.780591 4721 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.780951 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-mwc5f"] Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.781330 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.781455 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.781531 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.781453 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.781792 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.781824 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-mwc5f" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.781858 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.781462 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.782004 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.782041 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.783583 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.784092 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.784138 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.784329 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.784335 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.784386 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.784332 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.784842 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.785551 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.787179 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.787283 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.787354 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.788123 4721 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.801620 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.813901 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.823880 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.832570 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.835854 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.835896 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.835916 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.836173 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.836615 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.835934 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.836696 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.836716 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.836847 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.836925 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.836993 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.836996 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837027 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837067 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837148 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837170 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837187 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837203 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837218 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837233 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837252 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837268 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837282 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837614 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837637 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.838123 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.838141 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.838427 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837160 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837306 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837523 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837575 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837667 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837678 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837697 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837714 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837845 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837857 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.837994 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.838019 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.838047 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.838382 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.838632 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.838652 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.838691 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.838708 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.838857 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.838940 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.838956 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.838985 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839029 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839034 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839097 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839129 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839151 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839173 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839197 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839222 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839260 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839372 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839421 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839462 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839543 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839570 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839594 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839619 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839642 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839667 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 
11:51:29.839690 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839714 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839739 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839765 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839783 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839800 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839816 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839832 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839855 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839874 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839900 4721 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839921 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839942 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839967 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.839991 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840015 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840037 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840061 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840083 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840102 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840123 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840148 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840174 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840218 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840247 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840272 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840296 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840321 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840363 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840386 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840413 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840445 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840464 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840481 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840497 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840512 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840527 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840543 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840560 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840577 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840592 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" 
(UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840608 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840623 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840639 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840657 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840676 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840698 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840712 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840727 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840743 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840758 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840774 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840789 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840817 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840834 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840849 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840866 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840882 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840896 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840911 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 11:51:29 crc kubenswrapper[4721]: 
I1202 11:51:29.840926 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840941 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840963 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.840984 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841006 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841030 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841054 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841076 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841096 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841118 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: 
\"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841140 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841159 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841182 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841206 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841231 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841257 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841287 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841305 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841320 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841359 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod 
\"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841382 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841399 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841418 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841434 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841452 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841469 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841488 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841503 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841518 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841533 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841549 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841564 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841579 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841595 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841611 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841633 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841657 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841679 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841702 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841726 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841742 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841758 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841776 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841790 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841806 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841822 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841838 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841853 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841842 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841874 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841893 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841910 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841928 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841945 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841961 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841979 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.841994 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842010 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842024 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842043 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842058 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842072 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842088 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842102 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842118 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842139 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842155 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842171 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842187 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842203 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842218 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842234 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842250 4721 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842266 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842283 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842301 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842318 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842424 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842444 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842462 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842478 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842501 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: 
\"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842527 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842553 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842580 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842602 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842628 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842647 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842666 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842683 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842700 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842721 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: 
\"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842740 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842765 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842790 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842812 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842861 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842896 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842924 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842956 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.842984 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod 
\"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843009 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843038 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843063 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843089 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843114 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d4a25fcd-b096-4c52-9072-d7d7c073adb9-hosts-file\") pod \"node-resolver-mwc5f\" (UID: \"d4a25fcd-b096-4c52-9072-d7d7c073adb9\") " pod="openshift-dns/node-resolver-mwc5f" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843140 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8phqx\" (UniqueName: \"kubernetes.io/projected/d4a25fcd-b096-4c52-9072-d7d7c073adb9-kube-api-access-8phqx\") pod \"node-resolver-mwc5f\" (UID: \"d4a25fcd-b096-4c52-9072-d7d7c073adb9\") " pod="openshift-dns/node-resolver-mwc5f" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843171 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843203 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 
11:51:29.843230 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843258 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843285 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843434 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843454 4721 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843469 4721 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843484 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843498 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843513 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843527 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843543 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843558 4721 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843573 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843590 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843608 4721 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843626 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843640 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843654 4721 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843668 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843682 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843696 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843709 4721 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843724 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843738 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843751 4721 reconciler_common.go:293] "Volume 
detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843765 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843779 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843792 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843806 4721 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843792 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.844074 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.844271 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.844403 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.844476 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.844662 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.844764 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.844812 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.844831 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.844940 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.845155 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.845291 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.845328 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.845886 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.846371 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.846826 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.845666 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.847271 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.847276 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.847617 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.847661 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.847951 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.848020 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.848283 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.848296 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.848504 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.848576 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.848843 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.848848 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.849133 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.849535 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.849660 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.849900 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.849895 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.850051 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.850209 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.850686 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.850769 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.851053 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.851101 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.851517 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.851680 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.851691 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.851953 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.852258 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.852328 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.852351 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.843820 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.852418 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.852623 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.852820 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.852930 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.853373 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.853721 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.853787 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.853821 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.853856 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.854072 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.854118 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.854223 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.854334 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.854577 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.854634 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.854766 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.854886 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.854944 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.854982 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.855189 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.855306 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.855500 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.855604 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.855738 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.856100 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.856424 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.850457 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.856517 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). 
InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.856604 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.856617 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.856934 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.857028 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.856288 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.856103 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.857266 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.857925 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.856949 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.858315 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.858529 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.858639 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.858901 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.858428 4721 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.859605 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.859856 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.859955 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.860011 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.860100 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.860123 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.860332 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.860391 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.860556 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.860756 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.860984 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.861019 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.861278 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.861287 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.861536 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.861633 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.862196 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.862224 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.862251 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.862299 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.862431 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.862598 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.862669 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.862684 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.862741 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:30.362719302 +0000 UTC m=+20.101269881 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.862816 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.862855 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:30.362846096 +0000 UTC m=+20.101396685 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.862892 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.862928 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.863039 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.863137 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:51:30.363110364 +0000 UTC m=+20.101660733 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.863153 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.863156 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.863593 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.863635 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.863697 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.863888 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.863900 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.863914 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.863945 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:29Z","lastTransitionTime":"2025-12-02T11:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.864136 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.864526 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.864595 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.866152 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.869526 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.870792 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.871016 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.871022 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.871089 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.871101 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.871742 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.871923 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.872299 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.872303 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.872984 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.873457 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.874983 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.875093 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.875166 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.877404 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.877915 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.878520 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.878538 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.878550 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.878614 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:30.378594376 +0000 UTC m=+20.117144955 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.878685 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.878698 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.878706 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:29 crc kubenswrapper[4721]: E1202 11:51:29.878736 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:30.37872964 +0000 UTC m=+20.117279989 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.882715 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.882886 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.882934 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.884526 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.885682 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.887380 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.887467 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.887677 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.887711 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.887804 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.888552 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.887682 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.888118 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.888277 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.889282 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.888472 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.888503 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.888845 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.888898 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.889040 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.889161 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.889655 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.889697 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.894722 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.894865 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.894883 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.895044 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.895297 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.895312 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.895470 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.895649 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.895829 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.899242 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.900037 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.905534 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.905564 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.914899 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.916220 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.920177 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.926951 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.932412 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.936081 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.952869 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8phqx\" (UniqueName: \"kubernetes.io/projected/d4a25fcd-b096-4c52-9072-d7d7c073adb9-kube-api-access-8phqx\") pod \"node-resolver-mwc5f\" (UID: \"d4a25fcd-b096-4c52-9072-d7d7c073adb9\") " pod="openshift-dns/node-resolver-mwc5f" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.952978 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d4a25fcd-b096-4c52-9072-d7d7c073adb9-hosts-file\") pod \"node-resolver-mwc5f\" (UID: \"d4a25fcd-b096-4c52-9072-d7d7c073adb9\") " pod="openshift-dns/node-resolver-mwc5f" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953051 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953091 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953145 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953157 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953168 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953180 4721 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953191 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953201 4721 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953211 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953222 4721 reconciler_common.go:293] "Volume detached for 
volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953232 4721 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953243 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953254 4721 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953265 4721 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953275 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953286 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953298 4721 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953308 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953319 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953330 4721 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953354 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953366 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953377 4721 
reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953388 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953398 4721 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953408 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953418 4721 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953430 4721 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953441 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953453 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953465 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953476 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953489 4721 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953502 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953514 4721 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc 
kubenswrapper[4721]: I1202 11:51:29.953527 4721 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953537 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953549 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953561 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953573 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953585 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953596 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953608 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953619 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953631 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953641 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953653 4721 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953665 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953676 4721 
reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953688 4721 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953701 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953712 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953724 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953735 4721 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953747 4721 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953758 4721 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953769 4721 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953781 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953799 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953810 4721 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953821 4721 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 
11:51:29.953831 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953842 4721 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953854 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953864 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953876 4721 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953888 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953902 4721 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953914 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953925 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953937 4721 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953949 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953959 4721 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953970 4721 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc 
kubenswrapper[4721]: I1202 11:51:29.953980 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953991 4721 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954003 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954014 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954024 4721 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954036 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954047 4721 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954059 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954075 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954087 4721 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954097 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954108 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954119 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 
11:51:29.954129 4721 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954141 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954151 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954162 4721 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954173 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954188 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954199 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954210 4721 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954221 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954231 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954242 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954253 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954266 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 
11:51:29.954278 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954289 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954300 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954312 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954324 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954335 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954334 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.953510 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d4a25fcd-b096-4c52-9072-d7d7c073adb9-hosts-file\") pod \"node-resolver-mwc5f\" (UID: \"d4a25fcd-b096-4c52-9072-d7d7c073adb9\") " pod="openshift-dns/node-resolver-mwc5f" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954360 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954386 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954400 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954412 4721 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc 
kubenswrapper[4721]: I1202 11:51:29.954423 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954435 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954446 4721 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954456 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954469 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954480 4721 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954491 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954502 4721 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954513 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954523 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954533 4721 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954544 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954554 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954565 4721 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954577 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954545 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954587 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954674 4721 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954687 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954699 4721 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954709 4721 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954720 4721 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954733 4721 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954744 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954755 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954766 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" 
DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954778 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954789 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954801 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954812 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954839 4721 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954858 4721 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954867 4721 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954877 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954884 4721 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954895 4721 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954903 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954911 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954919 4721 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954928 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954936 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954944 4721 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954952 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954966 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954974 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954983 4721 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954991 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.954999 4721 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.955007 4721 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.955015 4721 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.955025 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.955033 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.955041 4721 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: 
\"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.955049 4721 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.955057 4721 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.955066 4721 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.955074 4721 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.955081 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.955092 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.968175 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8phqx\" (UniqueName: \"kubernetes.io/projected/d4a25fcd-b096-4c52-9072-d7d7c073adb9-kube-api-access-8phqx\") pod \"node-resolver-mwc5f\" (UID: \"d4a25fcd-b096-4c52-9072-d7d7c073adb9\") " pod="openshift-dns/node-resolver-mwc5f" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.969240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.969295 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.969304 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.969317 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.969325 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:29Z","lastTransitionTime":"2025-12-02T11:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.970160 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.971628 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40" exitCode=255 Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.971660 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40"} Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.981504 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.981653 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.981533 4721 scope.go:117] "RemoveContainer" containerID="7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40" Dec 02 11:51:29 crc kubenswrapper[4721]: I1202 11:51:29.993030 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.000847 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.005467 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 
2026-12-02 11:46:28 +0000 UTC, rotation deadline is 2026-10-18 02:57:17.33414616 +0000 UTC Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.005535 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7671h5m47.328613033s for next certificate rotation Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.010557 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.020611 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.030602 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.040823 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.078091 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.078133 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.078144 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.078158 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.078167 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:30Z","lastTransitionTime":"2025-12-02T11:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.096628 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.102917 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.108984 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.115142 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-mwc5f" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.183921 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.184010 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.184022 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.184039 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.184050 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:30Z","lastTransitionTime":"2025-12-02T11:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.286431 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.286467 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.286477 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.286496 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.286504 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:30Z","lastTransitionTime":"2025-12-02T11:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.389100 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.389155 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.389169 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.389189 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.389203 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:30Z","lastTransitionTime":"2025-12-02T11:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.461666 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.461758 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.461788 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.461809 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.461830 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:30 crc kubenswrapper[4721]: E1202 11:51:30.461857 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Dec 02 11:51:30 crc kubenswrapper[4721]: E1202 11:51:30.461862 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:51:31.461832607 +0000 UTC m=+21.200383036 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:51:30 crc kubenswrapper[4721]: E1202 11:51:30.461915 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:31.461900149 +0000 UTC m=+21.200450498 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 11:51:30 crc kubenswrapper[4721]: E1202 11:51:30.461919 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 11:51:30 crc kubenswrapper[4721]: E1202 11:51:30.461931 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 11:51:30 crc kubenswrapper[4721]: E1202 11:51:30.461942 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:30 crc kubenswrapper[4721]: E1202 11:51:30.461955 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 11:51:30 crc kubenswrapper[4721]: E1202 11:51:30.462021 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 11:51:30 crc kubenswrapper[4721]: E1202 11:51:30.462068 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 11:51:30 crc kubenswrapper[4721]: E1202 11:51:30.462085 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not 
registered] Dec 02 11:51:30 crc kubenswrapper[4721]: E1202 11:51:30.461970 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:31.461961951 +0000 UTC m=+21.200512300 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:30 crc kubenswrapper[4721]: E1202 11:51:30.462166 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:31.462145497 +0000 UTC m=+21.200695846 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 11:51:30 crc kubenswrapper[4721]: E1202 11:51:30.462181 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:31.462175628 +0000 UTC m=+21.200725977 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.491138 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.491177 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.491188 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.491206 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.491217 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:30Z","lastTransitionTime":"2025-12-02T11:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.593299 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.593364 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.593381 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.593400 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.593411 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:30Z","lastTransitionTime":"2025-12-02T11:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.690497 4721 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 02 11:51:30 crc kubenswrapper[4721]: W1202 11:51:30.690738 4721 reflector.go:484] object-"openshift-network-node-identity"/"env-overrides": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"env-overrides": Unexpected watch close - watch lasted less than a second and no items received Dec 02 11:51:30 crc kubenswrapper[4721]: W1202 11:51:30.690747 4721 reflector.go:484] object-"openshift-network-node-identity"/"network-node-identity-cert": watch of *v1.Secret ended with: very short watch: object-"openshift-network-node-identity"/"network-node-identity-cert": Unexpected watch close - watch lasted less than a second and no items received Dec 02 11:51:30 crc kubenswrapper[4721]: W1202 11:51:30.690770 4721 reflector.go:484] object-"openshift-network-node-identity"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 02 11:51:30 crc kubenswrapper[4721]: W1202 11:51:30.690795 4721 reflector.go:484] object-"openshift-network-operator"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 02 11:51:30 crc kubenswrapper[4721]: W1202 11:51:30.690798 4721 reflector.go:484] object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": watch of *v1.Secret ended with: very short watch: object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": Unexpected watch close - watch lasted less than a second and no items received Dec 02 11:51:30 crc kubenswrapper[4721]: W1202 11:51:30.690911 4721 reflector.go:484] object-"openshift-dns"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-dns"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 02 11:51:30 crc kubenswrapper[4721]: W1202 11:51:30.690957 4721 reflector.go:484] 
object-"openshift-network-operator"/"iptables-alerter-script": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"iptables-alerter-script": Unexpected watch close - watch lasted less than a second and no items received Dec 02 11:51:30 crc kubenswrapper[4721]: W1202 11:51:30.690979 4721 reflector.go:484] object-"openshift-network-operator"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 02 11:51:30 crc kubenswrapper[4721]: W1202 11:51:30.690998 4721 reflector.go:484] object-"openshift-network-node-identity"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 02 11:51:30 crc kubenswrapper[4721]: W1202 11:51:30.691015 4721 reflector.go:484] object-"openshift-network-operator"/"metrics-tls": watch of *v1.Secret ended with: very short watch: object-"openshift-network-operator"/"metrics-tls": Unexpected watch close - watch lasted less than a second and no items received Dec 02 11:51:30 crc kubenswrapper[4721]: W1202 11:51:30.691223 4721 reflector.go:484] object-"openshift-network-node-identity"/"ovnkube-identity-cm": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"ovnkube-identity-cm": Unexpected watch close - watch lasted less than a second and no items received Dec 02 11:51:30 crc kubenswrapper[4721]: W1202 11:51:30.691377 4721 reflector.go:484] object-"openshift-dns"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-dns"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.695652 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.695683 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.695693 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.695708 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.695717 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:30Z","lastTransitionTime":"2025-12-02T11:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.797906 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.797951 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.797967 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.797985 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.797998 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:30Z","lastTransitionTime":"2025-12-02T11:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.843255 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.843976 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.844962 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.845733 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.846428 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.847043 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.847821 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.848388 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.850299 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.850780 4721 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.850830 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.851723 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.852329 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.853204 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.853727 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.854611 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.855108 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.855793 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.856629 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.857172 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.857825 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.858679 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.859237 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.860253 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.861245 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.861854 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.862789 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers 
with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.863405 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.864984 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.865654 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.866262 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.867227 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.867807 4721 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.867900 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.869957 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.870415 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.870809 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.872856 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.873648 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.874189 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.875186 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.876133 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.877165 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.877781 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.879048 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.880161 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.880702 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.881865 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.882527 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.883846 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.884679 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.885301 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.886293 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.887242 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.888287 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.889172 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.894673 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.900962 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.901021 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.901038 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.901060 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.901082 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:30Z","lastTransitionTime":"2025-12-02T11:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.915155 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.929680 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.951682 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02
T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.966982 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.975539 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.975586 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.975597 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"521f2d4d25566f71663b957e9f22e29ce2ae704b72d83efdc11bf9c859dda373"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.976865 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-mwc5f" event={"ID":"d4a25fcd-b096-4c52-9072-d7d7c073adb9","Type":"ContainerStarted","Data":"2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.976891 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-mwc5f" event={"ID":"d4a25fcd-b096-4c52-9072-d7d7c073adb9","Type":"ContainerStarted","Data":"ac7076560c86e34efcac60b7c7f2af94a955e1324348e18ef328bc56cd9fd8f6"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.978153 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.978182 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"05ddb0d19ab8a2082a118bef4ccef97ab111c95b6e8014dfe60cfab8a545e765"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.978972 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"d80ca5096b5211cd2a9dc6eef0d312f16682a9b7e24c484f51a98ff0c38a5a16"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.980954 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.982432 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957"} Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.982744 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:51:30 crc kubenswrapper[4721]: I1202 11:51:30.989665 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.003137 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.004540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.004606 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.004621 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.004640 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.004653 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:31Z","lastTransitionTime":"2025-12-02T11:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.013825 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.023530 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.033726 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.051761 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.073753 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.086094 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.104549 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.107083 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.107122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.107131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.107147 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.107163 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:31Z","lastTransitionTime":"2025-12-02T11:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.209497 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.209539 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.209549 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.209564 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.209575 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:31Z","lastTransitionTime":"2025-12-02T11:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.312321 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.312375 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.312386 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.312401 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.312410 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:31Z","lastTransitionTime":"2025-12-02T11:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.340585 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-jzz4m"] Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.340937 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.342398 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.342561 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.342764 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.342849 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.342934 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.351985 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.364098 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.380379 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.395715 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.408441 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.414629 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.414688 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.414704 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.414718 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.414728 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:31Z","lastTransitionTime":"2025-12-02T11:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.421652 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.433218 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.444456 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.458999 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471304 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471446 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3becf918-fa56-4f68-9963-b19fb91d4343-multus-daemon-config\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.471487 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:51:33.471459557 +0000 UTC m=+23.210009906 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471536 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471580 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-cnibin\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471601 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-os-release\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471619 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-run-netns\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.471633 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471642 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.471718 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:33.471699515 +0000 UTC m=+23.210249864 (durationBeforeRetry 2s). 
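
[Annotation] The nestedpendingoperations records above and below show the volume manager's retry gate: an operation fails (here, UnmountVolume.TearDown for the hostpath-provisioner PVC, because the kubevirt.io.hostpath-provisioner CSI driver has not re-registered after the kubelet restart), and no retry is permitted until lastFailure plus a backoff window ("durationBeforeRetry 2s"). A minimal, dependency-free Go sketch of that pattern follows; the type names, the 500ms initial delay, the doubling rule, and the 2-minute cap are illustrative assumptions, not kubelet's exact implementation.

package main

import (
	"fmt"
	"time"
)

// backoffGate models the per-volume retry gate visible in the log: after a
// failure, retries are refused until lastFailure + delay, and the delay
// doubles on each consecutive failure up to a cap.
type backoffGate struct {
	lastFailure time.Time
	delay       time.Duration
	maxDelay    time.Duration
}

func (g *backoffGate) recordFailure(now time.Time) {
	g.lastFailure = now
	switch {
	case g.delay == 0:
		g.delay = 500 * time.Millisecond // assumed initial delay
	case g.delay < g.maxDelay:
		g.delay *= 2
		if g.delay > g.maxDelay {
			g.delay = g.maxDelay
		}
	}
}

func (g *backoffGate) retryAllowed(now time.Time) (bool, time.Duration) {
	wait := g.lastFailure.Add(g.delay).Sub(now)
	if wait > 0 {
		return false, wait
	}
	return true, 0
}

func main() {
	g := &backoffGate{maxDelay: 2 * time.Minute}
	now := time.Now()
	g.recordFailure(now) // TearDown fails once: next window 500ms
	g.recordFailure(now) // fails again: window doubles to 1s
	g.recordFailure(now) // fails again: 2s, as in "durationBeforeRetry 2s"
	ok, wait := g.retryAllowed(now)
	fmt.Printf("retry allowed: %v, next attempt in %s\n", ok, wait.Round(time.Second))
}

Under these assumed constants a third consecutive failure lands exactly on the 2s window recorded in the log; the point of the gate is that a driver that is merely slow to register is retried cheaply instead of being hammered in a tight loop.
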
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.471725 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471738 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-system-cni-dir\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.471765 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:33.471758457 +0000 UTC m=+23.210308806 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471784 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-multus-cni-dir\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471807 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3becf918-fa56-4f68-9963-b19fb91d4343-cni-binary-copy\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471842 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471875 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-var-lib-cni-multus\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471900 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: 
\"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-run-multus-certs\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471921 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-etc-kubernetes\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471944 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-var-lib-cni-bin\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.471965 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-multus-conf-dir\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.472057 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.472096 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.472098 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbxk8\" (UniqueName: \"kubernetes.io/projected/3becf918-fa56-4f68-9963-b19fb91d4343-kube-api-access-rbxk8\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.472120 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.472149 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-hostroot\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.472187 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:33.47216559 +0000 UTC m=+23.210715949 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.472224 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.472279 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-multus-socket-dir-parent\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.472302 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-run-k8s-cni-cncf-io\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.472387 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.472396 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-var-lib-kubelet\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.472413 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.472448 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.472513 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:33.472495951 +0000 UTC m=+23.211046300 (durationBeforeRetry 2s). 
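
[Annotation] The projected-volume failures above and below share one mechanism: a kube-api-access volume is assembled from several sources (the kube-root-ca.crt and openshift-service-ca.crt ConfigMaps plus a service-account token), and until the kubelet's reflector caches for that namespace are populated (the "Caches populated" records elsewhere in this log), each source lookup fails with "object not registered" and the per-source errors are aggregated into one bracketed message. A dependency-free Go sketch of that aggregation, with a hypothetical map standing in for the kubelet's object cache:

package main

import (
	"errors"
	"fmt"
	"strings"
)

// missingSources checks each required source object against a registry of
// populated caches and aggregates the misses into one bracketed error, the
// same shape as the projected.go errors recorded above.
func missingSources(registered map[string]bool, namespace string, sources []string) error {
	var msgs []string
	for _, name := range sources {
		if !registered[namespace+"/"+name] {
			msgs = append(msgs, fmt.Sprintf("object %q/%q not registered", namespace, name))
		}
	}
	if len(msgs) == 0 {
		return nil
	}
	return errors.New("[" + strings.Join(msgs, ", ") + "]")
}

func main() {
	// Hypothetical stand-in for the kubelet's object cache: only namespaces
	// whose reflectors have announced "Caches populated" are present.
	registered := map[string]bool{
		"openshift-multus/kube-root-ca.crt":         true,
		"openshift-multus/openshift-service-ca.crt": true,
	}
	err := missingSources(registered, "openshift-network-diagnostics",
		[]string{"kube-root-ca.crt", "openshift-service-ca.crt"})
	fmt.Println(err) // aggregate matches the error shape in the records above
}

This is why the multus-jzz4m mounts succeed immediately (its namespace's caches are already populated) while the openshift-network-diagnostics pods keep cycling through the 2s retry windows.
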
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.511994 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.517652 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.517694 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.517648 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.517704 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.517733 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.517743 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:31Z","lastTransitionTime":"2025-12-02T11:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573212 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-multus-socket-dir-parent\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573252 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-run-k8s-cni-cncf-io\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573273 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-var-lib-kubelet\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573292 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-os-release\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573309 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-run-netns\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573326 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3becf918-fa56-4f68-9963-b19fb91d4343-multus-daemon-config\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573370 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-cnibin\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573387 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-multus-cni-dir\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573402 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3becf918-fa56-4f68-9963-b19fb91d4343-cni-binary-copy\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573424 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-system-cni-dir\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573429 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-var-lib-kubelet\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573476 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-run-netns\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573447 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-var-lib-cni-multus\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573485 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-var-lib-cni-multus\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573440 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-run-k8s-cni-cncf-io\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573501 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-os-release\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573414 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-multus-socket-dir-parent\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573547 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-system-cni-dir\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573598 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-cnibin\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573630 4721 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-var-lib-cni-bin\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573737 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-multus-conf-dir\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573626 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-multus-cni-dir\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573762 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-run-multus-certs\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573661 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-var-lib-cni-bin\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573799 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-etc-kubernetes\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573821 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbxk8\" (UniqueName: \"kubernetes.io/projected/3becf918-fa56-4f68-9963-b19fb91d4343-kube-api-access-rbxk8\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573826 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-multus-conf-dir\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573858 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-host-run-multus-certs\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.573952 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-etc-kubernetes\") pod \"multus-jzz4m\" (UID: 
\"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.574024 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-hostroot\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.574071 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3becf918-fa56-4f68-9963-b19fb91d4343-hostroot\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.574108 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3becf918-fa56-4f68-9963-b19fb91d4343-multus-daemon-config\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.574219 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3becf918-fa56-4f68-9963-b19fb91d4343-cni-binary-copy\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.585591 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.590076 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.618051 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbxk8\" (UniqueName: \"kubernetes.io/projected/3becf918-fa56-4f68-9963-b19fb91d4343-kube-api-access-rbxk8\") pod \"multus-jzz4m\" (UID: \"3becf918-fa56-4f68-9963-b19fb91d4343\") " pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.620064 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.620088 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.620096 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.620111 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.620119 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:31Z","lastTransitionTime":"2025-12-02T11:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.636854 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.650564 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-jzz4m" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.651697 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.656273 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 11:51:31 crc kubenswrapper[4721]: W1202 11:51:31.668203 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3becf918_fa56_4f68_9963_b19fb91d4343.slice/crio-40397aee3e3681d5a607ddf7856140223bff6b3288fe7e22b5f43f36d4c4442f WatchSource:0}: Error finding container 40397aee3e3681d5a607ddf7856140223bff6b3288fe7e22b5f43f36d4c4442f: Status 404 returned error can't find the container with id 40397aee3e3681d5a607ddf7856140223bff6b3288fe7e22b5f43f36d4c4442f Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.681113 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.688465 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.704633 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.714383 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.721792 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.722671 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.722700 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.722710 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.722725 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.722735 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:31Z","lastTransitionTime":"2025-12-02T11:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.727754 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-mcr6s"] Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.728111 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.728193 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-cflql"] Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.728846 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.730157 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.732687 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.733303 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.734264 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.734787 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.736105 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.736322 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.750217 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.761967 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.768759 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.782957 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.793634 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.807049 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.817529 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.818820 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.823275 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.824760 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.824785 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.824793 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.824807 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.824817 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:31Z","lastTransitionTime":"2025-12-02T11:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.830490 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.839145 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.839204 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.839269 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.839371 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.839478 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:51:31 crc kubenswrapper[4721]: E1202 11:51:31.839535 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.853869 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.873428 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.876415 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-tuning-conf-dir\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 
11:51:31.876446 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/141842b6-fd0c-48e6-9795-a627e6f96bd4-mcd-auth-proxy-config\") pod \"machine-config-daemon-mcr6s\" (UID: \"141842b6-fd0c-48e6-9795-a627e6f96bd4\") " pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.876483 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-cni-binary-copy\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.876499 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-os-release\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.876517 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-system-cni-dir\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.876694 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qc4xk\" (UniqueName: \"kubernetes.io/projected/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-kube-api-access-qc4xk\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.876779 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/141842b6-fd0c-48e6-9795-a627e6f96bd4-rootfs\") pod \"machine-config-daemon-mcr6s\" (UID: \"141842b6-fd0c-48e6-9795-a627e6f96bd4\") " pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.876805 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/141842b6-fd0c-48e6-9795-a627e6f96bd4-proxy-tls\") pod \"machine-config-daemon-mcr6s\" (UID: \"141842b6-fd0c-48e6-9795-a627e6f96bd4\") " pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.876831 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-cnibin\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.876857 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" 
(UniqueName: \"kubernetes.io/configmap/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.876886 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blrdv\" (UniqueName: \"kubernetes.io/projected/141842b6-fd0c-48e6-9795-a627e6f96bd4-kube-api-access-blrdv\") pod \"machine-config-daemon-mcr6s\" (UID: \"141842b6-fd0c-48e6-9795-a627e6f96bd4\") " pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.901225 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.918085 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.927643 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.927686 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.927697 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.927713 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.927724 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:31Z","lastTransitionTime":"2025-12-02T11:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.935749 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.951008 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.964461 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.975973 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.977947 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/141842b6-fd0c-48e6-9795-a627e6f96bd4-rootfs\") pod \"machine-config-daemon-mcr6s\" (UID: \"141842b6-fd0c-48e6-9795-a627e6f96bd4\") " pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.977974 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/141842b6-fd0c-48e6-9795-a627e6f96bd4-proxy-tls\") pod \"machine-config-daemon-mcr6s\" (UID: \"141842b6-fd0c-48e6-9795-a627e6f96bd4\") " pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.977992 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-cnibin\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.978008 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blrdv\" (UniqueName: \"kubernetes.io/projected/141842b6-fd0c-48e6-9795-a627e6f96bd4-kube-api-access-blrdv\") pod \"machine-config-daemon-mcr6s\" (UID: \"141842b6-fd0c-48e6-9795-a627e6f96bd4\") " pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.978025 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.978053 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-tuning-conf-dir\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.978073 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" 
(UniqueName: \"kubernetes.io/configmap/141842b6-fd0c-48e6-9795-a627e6f96bd4-mcd-auth-proxy-config\") pod \"machine-config-daemon-mcr6s\" (UID: \"141842b6-fd0c-48e6-9795-a627e6f96bd4\") " pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.978102 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-cni-binary-copy\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.978117 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/141842b6-fd0c-48e6-9795-a627e6f96bd4-rootfs\") pod \"machine-config-daemon-mcr6s\" (UID: \"141842b6-fd0c-48e6-9795-a627e6f96bd4\") " pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.978127 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-os-release\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.978175 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-system-cni-dir\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.978181 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-os-release\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.978212 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qc4xk\" (UniqueName: \"kubernetes.io/projected/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-kube-api-access-qc4xk\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.978218 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-cnibin\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.978288 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-system-cni-dir\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.978548 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-tuning-conf-dir\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.984572 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.985802 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzz4m" event={"ID":"3becf918-fa56-4f68-9963-b19fb91d4343","Type":"ContainerStarted","Data":"ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e"} Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.985850 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzz4m" 
event={"ID":"3becf918-fa56-4f68-9963-b19fb91d4343","Type":"ContainerStarted","Data":"40397aee3e3681d5a607ddf7856140223bff6b3288fe7e22b5f43f36d4c4442f"} Dec 02 11:51:31 crc kubenswrapper[4721]: I1202 11:51:31.994626 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.000376 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/141842b6-fd0c-48e6-9795-a627e6f96bd4-mcd-auth-proxy-config\") pod \"machine-config-daemon-mcr6s\" (UID: \"141842b6-fd0c-48e6-9795-a627e6f96bd4\") " pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.000391 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:32 crc 
kubenswrapper[4721]: I1202 11:51:32.003056 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/141842b6-fd0c-48e6-9795-a627e6f96bd4-proxy-tls\") pod \"machine-config-daemon-mcr6s\" (UID: \"141842b6-fd0c-48e6-9795-a627e6f96bd4\") " pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.003909 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blrdv\" (UniqueName: \"kubernetes.io/projected/141842b6-fd0c-48e6-9795-a627e6f96bd4-kube-api-access-blrdv\") pod \"machine-config-daemon-mcr6s\" (UID: \"141842b6-fd0c-48e6-9795-a627e6f96bd4\") " pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.005858 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qc4xk\" (UniqueName: \"kubernetes.io/projected/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-kube-api-access-qc4xk\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.006532 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8aa4e714-4184-4fad-ad5c-d7bbf1575cb7-cni-binary-copy\") pod \"multus-additional-cni-plugins-cflql\" (UID: \"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\") " pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.007098 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.018034 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.028783 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.029613 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.029645 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.029653 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.029667 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.029677 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:32Z","lastTransitionTime":"2025-12-02T11:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.039557 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube
rnetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.043207 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.047622 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.049779 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-cflql" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.050141 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: W1202 11:51:32.067604 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8aa4e714_4184_4fad_ad5c_d7bbf1575cb7.slice/crio-57c3ccd8de4505aed518cb969fe56461afec52c78018aaa446a419de28717fcc WatchSource:0}: Error finding container 57c3ccd8de4505aed518cb969fe56461afec52c78018aaa446a419de28717fcc: Status 404 returned error can't find the container with id 57c3ccd8de4505aed518cb969fe56461afec52c78018aaa446a419de28717fcc Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.071940 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with incomplete 
status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729
d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: 
Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.089676 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\
"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.101829 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.105712 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.117463 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.130432 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.133163 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.133210 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.133223 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.133239 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.133250 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:32Z","lastTransitionTime":"2025-12-02T11:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.133959 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-gbxxd"] Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.134996 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.137503 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.137826 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.138007 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.138333 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.145286 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.145311 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.145326 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.148393 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.163585 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.165971 4721 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-network-operator"/"metrics-tls" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.180406 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.193434 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.206103 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.218195 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.233365 4721 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.235206 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.235245 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:32 crc kubenswrapper[4721]: 
I1202 11:51:32.235258 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.235275 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.235313 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:32Z","lastTransitionTime":"2025-12-02T11:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.249837 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.262181 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.276782 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.280857 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-run-netns\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.280890 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-etc-openvswitch\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.280907 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-node-log\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.280924 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-slash\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.280939 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-openvswitch\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.280962 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-var-lib-openvswitch\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.280978 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-ovn\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.280992 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-cni-netd\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.281006 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-systemd\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.281021 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovnkube-script-lib\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.281043 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-systemd-units\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.281057 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5c4m9\" (UniqueName: \"kubernetes.io/projected/e2a903a9-d920-48ed-8450-fa2a4fdb458b-kube-api-access-5c4m9\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.281073 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.281088 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovnkube-config\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.281102 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovn-node-metrics-cert\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.281124 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-run-ovn-kubernetes\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.281143 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-kubelet\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.281158 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-cni-bin\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.281173 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-env-overrides\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.281186 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-log-socket\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.290001 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.325430 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.337397 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.337439 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.337452 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.337469 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.337481 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:32Z","lastTransitionTime":"2025-12-02T11:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.369773 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.382367 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-var-lib-openvswitch\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 
11:51:32.382422 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-ovn\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.382443 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-systemd\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.382458 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-cni-netd\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.382492 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovnkube-script-lib\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.382517 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c4m9\" (UniqueName: \"kubernetes.io/projected/e2a903a9-d920-48ed-8450-fa2a4fdb458b-kube-api-access-5c4m9\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.382519 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-var-lib-openvswitch\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.382540 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-systemd\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.382534 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-systemd-units\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.382539 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-ovn\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.382557 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-cni-netd\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383259 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-systemd-units\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383358 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovnkube-config\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383552 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovn-node-metrics-cert\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383610 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383672 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-run-ovn-kubernetes\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383723 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-kubelet\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383750 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-cni-bin\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383746 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383770 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" 
(UniqueName: \"kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovnkube-script-lib\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383784 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-env-overrides\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383829 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-cni-bin\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383831 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-run-ovn-kubernetes\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383855 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-kubelet\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383869 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-log-socket\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383912 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-node-log\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383941 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-run-netns\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383950 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-log-socket\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383975 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-node-log\") pod \"ovnkube-node-gbxxd\" (UID: 
\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383979 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-etc-openvswitch\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.383997 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-run-netns\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.384018 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-slash\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.384047 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-etc-openvswitch\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.384050 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-openvswitch\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.384078 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-slash\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.384080 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovnkube-config\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.384082 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-openvswitch\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.384257 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-env-overrides\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.387724 
4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovn-node-metrics-cert\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.407233 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syn
cer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.434567 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5c4m9\" (UniqueName: \"kubernetes.io/projected/e2a903a9-d920-48ed-8450-fa2a4fdb458b-kube-api-access-5c4m9\") pod \"ovnkube-node-gbxxd\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") " pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.439644 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.439679 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.439688 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.439703 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.439713 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:32Z","lastTransitionTime":"2025-12-02T11:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.453917 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.468427 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.487603 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.499568 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 02 11:51:32 crc kubenswrapper[4721]: W1202 11:51:32.501624 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2a903a9_d920_48ed_8450_fa2a4fdb458b.slice/crio-23a4985a9eba9c06f2df46284c711cba193c8454534555b65219ca8392293e7b WatchSource:0}: Error finding container 23a4985a9eba9c06f2df46284c711cba193c8454534555b65219ca8392293e7b: Status 404 returned error can't find the container with id 23a4985a9eba9c06f2df46284c711cba193c8454534555b65219ca8392293e7b Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.512376 4721 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":
0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m
9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-d
ev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.529254 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.541622 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.541667 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.541681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.541697 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.541707 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:32Z","lastTransitionTime":"2025-12-02T11:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.568694 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.607846 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.646552 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.646603 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.646617 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.646637 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.646650 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:32Z","lastTransitionTime":"2025-12-02T11:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.649739 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.687478 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.727923 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.749224 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.749254 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.749263 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.749277 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.749286 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:32Z","lastTransitionTime":"2025-12-02T11:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.767983 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.815515 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.851838 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" 
Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.851883 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.851894 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.851912 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.851925 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:32Z","lastTransitionTime":"2025-12-02T11:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.859115 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.889673 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.926959 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.954096 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.954128 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.954136 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.954150 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.954160 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:32Z","lastTransitionTime":"2025-12-02T11:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.968378 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:32Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.989803 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerStarted","Data":"57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.989852 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerStarted","Data":"408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.989886 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerStarted","Data":"8f5226d5802996faed39f90d54d889cdc8070c9c7ed72888b53ce07efdba7118"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.990939 4721 generic.go:334] "Generic (PLEG): container finished" podID="8aa4e714-4184-4fad-ad5c-d7bbf1575cb7" 
containerID="5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040" exitCode=0 Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.991005 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" event={"ID":"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7","Type":"ContainerDied","Data":"5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.991030 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" event={"ID":"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7","Type":"ContainerStarted","Data":"57c3ccd8de4505aed518cb969fe56461afec52c78018aaa446a419de28717fcc"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.992130 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.993181 4721 generic.go:334] "Generic (PLEG): container finished" podID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerID="92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e" exitCode=0 Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.993201 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerDied","Data":"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e"} Dec 02 11:51:32 crc kubenswrapper[4721]: I1202 11:51:32.993224 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerStarted","Data":"23a4985a9eba9c06f2df46284c711cba193c8454534555b65219ca8392293e7b"} Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.008740 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.050225 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.056459 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.056495 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.056507 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.056523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.056533 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:33Z","lastTransitionTime":"2025-12-02T11:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.112158 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/
lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"conta
inerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.132218 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.159776 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.159801 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.159810 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.159822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.159831 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:33Z","lastTransitionTime":"2025-12-02T11:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.167019 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.207597 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.247878 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.250821 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-d9hjh"] Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.251191 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-d9hjh" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.262682 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.262739 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.262754 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.262774 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.262786 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:33Z","lastTransitionTime":"2025-12-02T11:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.279814 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.299531 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.321135 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.339767 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.364582 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.364602 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.364610 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.364622 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.364632 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:33Z","lastTransitionTime":"2025-12-02T11:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.370857 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.394140 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h569b\" (UniqueName: \"kubernetes.io/projected/eca136da-2294-4180-9a48-b4ac2547ed50-kube-api-access-h569b\") pod \"node-ca-d9hjh\" (UID: \"eca136da-2294-4180-9a48-b4ac2547ed50\") " pod="openshift-image-registry/node-ca-d9hjh" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.394187 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/eca136da-2294-4180-9a48-b4ac2547ed50-host\") pod \"node-ca-d9hjh\" (UID: \"eca136da-2294-4180-9a48-b4ac2547ed50\") " pod="openshift-image-registry/node-ca-d9hjh" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.394237 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/eca136da-2294-4180-9a48-b4ac2547ed50-serviceca\") pod \"node-ca-d9hjh\" (UID: \"eca136da-2294-4180-9a48-b4ac2547ed50\") " pod="openshift-image-registry/node-ca-d9hjh" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.407719 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.449263 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.467985 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.468256 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.468267 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.468283 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.468295 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:33Z","lastTransitionTime":"2025-12-02T11:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.486059 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.495471 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.495575 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h569b\" (UniqueName: \"kubernetes.io/projected/eca136da-2294-4180-9a48-b4ac2547ed50-kube-api-access-h569b\") pod \"node-ca-d9hjh\" (UID: \"eca136da-2294-4180-9a48-b4ac2547ed50\") " pod="openshift-image-registry/node-ca-d9hjh" Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.495613 4721 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:51:37.495588214 +0000 UTC m=+27.234138733 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.495651 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.495703 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/eca136da-2294-4180-9a48-b4ac2547ed50-host\") pod \"node-ca-d9hjh\" (UID: \"eca136da-2294-4180-9a48-b4ac2547ed50\") " pod="openshift-image-registry/node-ca-d9hjh" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.495726 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.495763 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.495770 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.495782 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.495824 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:37.495805941 +0000 UTC m=+27.234356290 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.495843 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/eca136da-2294-4180-9a48-b4ac2547ed50-serviceca\") pod \"node-ca-d9hjh\" (UID: \"eca136da-2294-4180-9a48-b4ac2547ed50\") " pod="openshift-image-registry/node-ca-d9hjh" Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.495894 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.495907 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.495917 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.495953 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:37.495945876 +0000 UTC m=+27.234496225 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.495982 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/eca136da-2294-4180-9a48-b4ac2547ed50-host\") pod \"node-ca-d9hjh\" (UID: \"eca136da-2294-4180-9a48-b4ac2547ed50\") " pod="openshift-image-registry/node-ca-d9hjh" Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.496039 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.496050 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.496056 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.496079 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:37.49607293 +0000 UTC m=+27.234623279 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.496112 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.496137 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:37.496128162 +0000 UTC m=+27.234678751 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.496888 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/eca136da-2294-4180-9a48-b4ac2547ed50-serviceca\") pod \"node-ca-d9hjh\" (UID: \"eca136da-2294-4180-9a48-b4ac2547ed50\") " pod="openshift-image-registry/node-ca-d9hjh" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.536026 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h569b\" (UniqueName: \"kubernetes.io/projected/eca136da-2294-4180-9a48-b4ac2547ed50-kube-api-access-h569b\") pod \"node-ca-d9hjh\" (UID: \"eca136da-2294-4180-9a48-b4ac2547ed50\") " pod="openshift-image-registry/node-ca-d9hjh" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.548865 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.570507 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.570539 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.570548 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.570563 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.570572 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:33Z","lastTransitionTime":"2025-12-02T11:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.593704 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e
7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.599639 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-d9hjh" Dec 02 11:51:33 crc kubenswrapper[4721]: W1202 11:51:33.615535 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeca136da_2294_4180_9a48_b4ac2547ed50.slice/crio-16fdc5962738234f36e09d12b486c35eeca55335e7f326a11e71904a395e6542 WatchSource:0}: Error finding container 16fdc5962738234f36e09d12b486c35eeca55335e7f326a11e71904a395e6542: Status 404 returned error can't find the container with id 16fdc5962738234f36e09d12b486c35eeca55335e7f326a11e71904a395e6542 Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.628719 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.668456 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.674741 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.674789 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.674802 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.674827 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.674841 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:33Z","lastTransitionTime":"2025-12-02T11:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.707010 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.750929 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.777705 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.777836 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.777900 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.777959 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.778010 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:33Z","lastTransitionTime":"2025-12-02T11:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.788015 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.829643 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.839021 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.839070 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.839118 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.839072 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.839174 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:51:33 crc kubenswrapper[4721]: E1202 11:51:33.839280 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.864843 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.879971 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:33 crc kubenswrapper[4721]: 
I1202 11:51:33.879997 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.880005 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.880019 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.880028 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:33Z","lastTransitionTime":"2025-12-02T11:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.911952 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdf
ec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.947356 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.981914 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.981949 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.981963 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.981979 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.981990 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:33Z","lastTransitionTime":"2025-12-02T11:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.988814 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:33Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.996769 4721 generic.go:334] "Generic (PLEG): container finished" podID="8aa4e714-4184-4fad-ad5c-d7bbf1575cb7" containerID="783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5" exitCode=0 Dec 02 11:51:33 crc kubenswrapper[4721]: I1202 11:51:33.996832 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" event={"ID":"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7","Type":"ContainerDied","Data":"783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5"} Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.000299 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerStarted","Data":"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476"} Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.000382 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerStarted","Data":"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487"} Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.000399 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerStarted","Data":"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b"} Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.000410 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerStarted","Data":"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661"} Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.001171 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-d9hjh" event={"ID":"eca136da-2294-4180-9a48-b4ac2547ed50","Type":"ContainerStarted","Data":"16fdc5962738234f36e09d12b486c35eeca55335e7f326a11e71904a395e6542"} Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.026307 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.068228 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.084141 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.084187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.084197 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.084213 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.084224 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:34Z","lastTransitionTime":"2025-12-02T11:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.109051 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.147992 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/hos
t/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\
\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.186048 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.186109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.186127 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.186150 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.186168 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:34Z","lastTransitionTime":"2025-12-02T11:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.193862 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.227201 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.268833 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.289017 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.289233 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.289430 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.289571 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.289696 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:34Z","lastTransitionTime":"2025-12-02T11:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.313636 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z 
is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.350268 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.387798 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.391682 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.391732 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.391745 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.391766 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.391782 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:34Z","lastTransitionTime":"2025-12-02T11:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
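The NodeNotReady condition repeating through this section has a second, independent cause: the runtime reports NetworkReady=false because no CNI configuration file exists in /etc/kubernetes/cni/net.d/. A minimal sketch of the same directory check, run on the node (the .conf/.conflist/.json suffix set mirrors what libcni loads and should be treated as an assumption):

    import pathlib

    # Directory named in the NodeNotReady condition in this log.
    CNI_DIR = pathlib.Path("/etc/kubernetes/cni/net.d")

    configs = sorted(p for p in CNI_DIR.glob("*")
                     if p.suffix in {".conf", ".conflist", ".json"})

    if configs:
        for p in configs:
            print("found CNI config:", p)
    else:
        print(f"no CNI configuration file in {CNI_DIR} -- matches the NotReady condition")

An empty result is expected here: ovnkube-node is still in PodInitializing, so nothing has written the OVN-Kubernetes conflist yet.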
Has your network provider started?"} Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.430738 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.478022 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9
864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.494087 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.494122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.494133 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.494150 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.494163 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:34Z","lastTransitionTime":"2025-12-02T11:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.514293 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.551963 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.586614 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.595752 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.595775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.595782 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.595796 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.595804 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:34Z","lastTransitionTime":"2025-12-02T11:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.631482 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{
\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.668554 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.702851 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.702897 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.702909 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.702932 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.702944 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:34Z","lastTransitionTime":"2025-12-02T11:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.710586 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\
\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.747395 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.789670 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.805054 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.805100 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.805112 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.805132 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.805145 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:34Z","lastTransitionTime":"2025-12-02T11:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.825128 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.871079 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.907113 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.907158 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.907170 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.907187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.907202 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:34Z","lastTransitionTime":"2025-12-02T11:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:34 crc kubenswrapper[4721]: I1202 11:51:34.913069 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e
7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:34Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.007144 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerStarted","Data":"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0"} Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.007184 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerStarted","Data":"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39"} Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.009163 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.009197 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.009208 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.009222 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.009233 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:35Z","lastTransitionTime":"2025-12-02T11:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.010164 4721 generic.go:334] "Generic (PLEG): container finished" podID="8aa4e714-4184-4fad-ad5c-d7bbf1575cb7" containerID="247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1" exitCode=0 Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.010210 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" event={"ID":"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7","Type":"ContainerDied","Data":"247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1"} Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.012092 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-d9hjh" event={"ID":"eca136da-2294-4180-9a48-b4ac2547ed50","Type":"ContainerStarted","Data":"2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0"} Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.032414 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMou
nts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"fini
shedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.046218 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.058913 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.068845 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.110745 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.113258 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.113314 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.113369 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.113401 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.113416 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:35Z","lastTransitionTime":"2025-12-02T11:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.149740 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.186404 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.215293 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.215319 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.215328 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.215357 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.215366 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:35Z","lastTransitionTime":"2025-12-02T11:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.226493 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.267422 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.310509 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.317873 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.317910 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.317920 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.317935 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.317948 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:35Z","lastTransitionTime":"2025-12-02T11:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.353358 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.393138 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z 
is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.421373 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.421404 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.421413 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.421426 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.421435 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:35Z","lastTransitionTime":"2025-12-02T11:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.426682 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: 
I1202 11:51:35.469041 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.511824 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.523454 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.523491 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.523500 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.523514 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.523525 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:35Z","lastTransitionTime":"2025-12-02T11:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.546678 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.587083 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.625849 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.625901 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.625918 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.625938 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.625954 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:35Z","lastTransitionTime":"2025-12-02T11:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.629504 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.670205 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.709195 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.727834 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.727872 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.727882 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.727898 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.727910 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:35Z","lastTransitionTime":"2025-12-02T11:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.748646 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.804285 4721 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]
},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cr
i-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.829671 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.829707 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.829715 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.829728 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.829737 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:35Z","lastTransitionTime":"2025-12-02T11:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.830468 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.838797 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.838847 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:35 crc kubenswrapper[4721]: E1202 11:51:35.838931 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.838959 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:35 crc kubenswrapper[4721]: E1202 11:51:35.839033 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:51:35 crc kubenswrapper[4721]: E1202 11:51:35.839154 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.867361 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.910054 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.931663 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.931703 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.931737 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.931760 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.931779 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:35Z","lastTransitionTime":"2025-12-02T11:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.947674 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube
rnetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:35 crc kubenswrapper[4721]: I1202 11:51:35.987965 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:
32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:35Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.017236 4721 generic.go:334] "Generic (PLEG): container finished" podID="8aa4e714-4184-4fad-ad5c-d7bbf1575cb7" containerID="19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565" exitCode=0 Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.017310 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" event={"ID":"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7","Type":"ContainerDied","Data":"19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565"} Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.032268 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.035003 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.035051 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.035067 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.035088 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.035104 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:36Z","lastTransitionTime":"2025-12-02T11:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.071715 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.114154 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z 
is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.136788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.136837 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.136850 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.136868 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.136881 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:36Z","lastTransitionTime":"2025-12-02T11:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.148393 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699
a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.189921 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.228515 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de259712
6bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.240707 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.240743 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.240754 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.240770 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:36 crc 
kubenswrapper[4721]: I1202 11:51:36.240781 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:36Z","lastTransitionTime":"2025-12-02T11:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.286423 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.314259 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.342776 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.342812 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.342820 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.342835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.342844 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:36Z","lastTransitionTime":"2025-12-02T11:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.346887 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.388336 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.432568 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z 
is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.445712 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.445744 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.445757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.445772 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.445782 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:36Z","lastTransitionTime":"2025-12-02T11:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.466284 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: 
I1202 11:51:36.506584 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.547390 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.548306 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.548353 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.548363 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.548376 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.548385 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:36Z","lastTransitionTime":"2025-12-02T11:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.592593 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.636370 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.650981 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.651025 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.651042 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.651060 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.651071 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:36Z","lastTransitionTime":"2025-12-02T11:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.666638 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.708178 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:36Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.753363 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.753492 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.753508 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.753539 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 
11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.753553 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:36Z","lastTransitionTime":"2025-12-02T11:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.855599 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.855634 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.855644 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.855658 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.855669 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:36Z","lastTransitionTime":"2025-12-02T11:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.958670 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.958705 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.958714 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.958728 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:36 crc kubenswrapper[4721]: I1202 11:51:36.958737 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:36Z","lastTransitionTime":"2025-12-02T11:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.022560 4721 generic.go:334] "Generic (PLEG): container finished" podID="8aa4e714-4184-4fad-ad5c-d7bbf1575cb7" containerID="8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e" exitCode=0 Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.022620 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" event={"ID":"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7","Type":"ContainerDied","Data":"8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e"} Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.026610 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerStarted","Data":"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac"} Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.035889 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:37Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.051652 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:37Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.061224 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.061263 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.061275 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.061292 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.061305 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:37Z","lastTransitionTime":"2025-12-02T11:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.076214 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:37Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.093835 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:37Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.106063 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:37Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.117829 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:37Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.128291 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:37Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.138674 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:37Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.154884 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:37Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.164304 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.164372 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.164386 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.164403 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.164418 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:37Z","lastTransitionTime":"2025-12-02T11:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.165471 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:37Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.175669 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:37Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.185868 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:51:37Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.233613 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11
273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:37Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.266092 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:37Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.267894 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.267930 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.267939 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.267954 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.267966 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:37Z","lastTransitionTime":"2025-12-02T11:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.306424 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:37Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.370597 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.370644 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.370655 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.370668 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.370676 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:37Z","lastTransitionTime":"2025-12-02T11:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.472953 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.472995 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.473007 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.473023 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.473036 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:37Z","lastTransitionTime":"2025-12-02T11:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.529196 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.529294 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.529405 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.529440 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.529481 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.529522 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.529545 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:51:45.529503982 +0000 UTC m=+35.268054331 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.529562 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.529593 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:45.529573775 +0000 UTC m=+35.268124134 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.529592 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.529602 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.529662 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:45.529646597 +0000 UTC m=+35.268196946 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.529605 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.529683 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.529615 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.529706 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.529723 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:45.529709829 +0000 UTC m=+35.268260188 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.529745 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:45.52973769 +0000 UTC m=+35.268288039 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.574654 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.574679 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.574690 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.574702 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.574711 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:37Z","lastTransitionTime":"2025-12-02T11:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.677225 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.677262 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.677271 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.677289 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.677300 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:37Z","lastTransitionTime":"2025-12-02T11:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.779434 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.779507 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.779526 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.779552 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.779570 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:37Z","lastTransitionTime":"2025-12-02T11:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.839279 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.839325 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.839398 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.839505 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.839610 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:51:37 crc kubenswrapper[4721]: E1202 11:51:37.839729 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.881922 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.882002 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.882056 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.882090 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.882127 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:37Z","lastTransitionTime":"2025-12-02T11:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.984898 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.984976 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.985002 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.985032 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:37 crc kubenswrapper[4721]: I1202 11:51:37.985056 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:37Z","lastTransitionTime":"2025-12-02T11:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.043127 4721 generic.go:334] "Generic (PLEG): container finished" podID="8aa4e714-4184-4fad-ad5c-d7bbf1575cb7" containerID="1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d" exitCode=0 Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.043173 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" event={"ID":"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7","Type":"ContainerDied","Data":"1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d"} Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.062518 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:38Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.076625 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:38Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.088735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.088793 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.088812 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.088835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.088851 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:38Z","lastTransitionTime":"2025-12-02T11:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.088914 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:38Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.102018 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:38Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.112758 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:38Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.131768 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:38Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.146229 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:38Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.171710 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:38Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.182466 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-02T11:51:38Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.191198 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.191252 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.191271 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.191291 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.191305 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:38Z","lastTransitionTime":"2025-12-02T11:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.192367 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:38Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.202440 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:38Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.219147 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3
cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569
f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:38Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.230319 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:38Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.241607 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:38Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.253691 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:38Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.293705 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.293749 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.293762 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.293780 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.293792 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:38Z","lastTransitionTime":"2025-12-02T11:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.396370 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.396406 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.396414 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.396427 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.396436 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:38Z","lastTransitionTime":"2025-12-02T11:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.499415 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.499456 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.499465 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.499481 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.499490 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:38Z","lastTransitionTime":"2025-12-02T11:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.603070 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.603151 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.603175 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.603209 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.603231 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:38Z","lastTransitionTime":"2025-12-02T11:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.706912 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.707165 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.707229 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.707293 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.707392 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:38Z","lastTransitionTime":"2025-12-02T11:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.809615 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.809686 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.809706 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.809734 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.809752 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:38Z","lastTransitionTime":"2025-12-02T11:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.912080 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.912113 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.912121 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.912137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:38 crc kubenswrapper[4721]: I1202 11:51:38.912147 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:38Z","lastTransitionTime":"2025-12-02T11:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.014950 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.014987 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.014998 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.015017 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.015028 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:39Z","lastTransitionTime":"2025-12-02T11:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.054110 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" event={"ID":"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7","Type":"ContainerStarted","Data":"d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748"} Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.067591 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerStarted","Data":"6a054bb9199392e567962d2c6650a94391b73feb4339b303f1cdf046ed66390b"} Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.067946 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.067977 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.070989 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.088147 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.097854 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.097979 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.108324 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9
864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.117633 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.117671 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.117680 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.117694 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.117703 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:39Z","lastTransitionTime":"2025-12-02T11:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.123359 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.136313 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.147126 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.158456 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.169817 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.186502 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.196212 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.206433 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.214655 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z"
Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.219614 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.219654 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.219664 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.219680 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.219691 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:39Z","lastTransitionTime":"2025-12-02T11:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.231394 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z 
is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.242419 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.253024 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.263251 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.273143 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.284067 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.294754 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.309250 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.319025 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.321428 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.321458 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.321470 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.321485 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.321496 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:39Z","lastTransitionTime":"2025-12-02T11:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.331477 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.348361 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a054bb9199392e567962d2c6650a94391b73feb4339b303f1cdf046ed66390b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.349525 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.349563 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.349575 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.349591 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.349602 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:39Z","lastTransitionTime":"2025-12-02T11:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.358217 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: E1202 11:51:39.362016 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.364793 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.364821 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.364831 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.364847 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.364858 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:39Z","lastTransitionTime":"2025-12-02T11:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.369439 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: E1202 11:51:39.377147 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.380137 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.380711 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.380739 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.380747 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:39 crc kubenswrapper[4721]: 
I1202 11:51:39.380760 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.380769 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:39Z","lastTransitionTime":"2025-12-02T11:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:39 crc kubenswrapper[4721]: E1202 11:51:39.395281 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.398254 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.398286 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.398296 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.398308 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.398317 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:39Z","lastTransitionTime":"2025-12-02T11:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.399624 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: E1202 11:51:39.410676 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.414199 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.414240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.414257 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.414279 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.414294 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:39Z","lastTransitionTime":"2025-12-02T11:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.418381 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: E1202 11:51:39.427206 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: E1202 11:51:39.427313 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.428868 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.428899 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.428910 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.428927 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.428942 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:39Z","lastTransitionTime":"2025-12-02T11:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.432040 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.442053 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:39Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.530858 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.530901 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.530913 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.530934 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.530947 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:39Z","lastTransitionTime":"2025-12-02T11:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.633554 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.633600 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.633619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.633642 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.633660 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:39Z","lastTransitionTime":"2025-12-02T11:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.736101 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.736157 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.736176 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.736202 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.736218 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:39Z","lastTransitionTime":"2025-12-02T11:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.838600 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.838638 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.838650 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.838667 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.838676 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.838678 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:39Z","lastTransitionTime":"2025-12-02T11:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.838737 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.839122 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:39 crc kubenswrapper[4721]: E1202 11:51:39.839288 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:51:39 crc kubenswrapper[4721]: E1202 11:51:39.839423 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:51:39 crc kubenswrapper[4721]: E1202 11:51:39.839527 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.941585 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.941626 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.941637 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.941656 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:39 crc kubenswrapper[4721]: I1202 11:51:39.941669 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:39Z","lastTransitionTime":"2025-12-02T11:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.043685 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.043747 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.043767 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.043792 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.043823 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:40Z","lastTransitionTime":"2025-12-02T11:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.069998 4721 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.146391 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.146438 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.146449 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.146468 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.146482 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:40Z","lastTransitionTime":"2025-12-02T11:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.248924 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.248966 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.248978 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.248996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.249009 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:40Z","lastTransitionTime":"2025-12-02T11:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.351375 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.351407 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.351415 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.351428 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.351436 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:40Z","lastTransitionTime":"2025-12-02T11:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.453052 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.453077 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.453085 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.453097 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.453105 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:40Z","lastTransitionTime":"2025-12-02T11:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.555239 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.555493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.555586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.555649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.555716 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:40Z","lastTransitionTime":"2025-12-02T11:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.626638 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.637983 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.647706 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.659030 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.659073 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.659086 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.659105 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.659115 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:40Z","lastTransitionTime":"2025-12-02T11:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.659570 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.677615 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9
864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.690516 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f5407555
8ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.701047 4721 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.709617 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.719638 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.733987 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.747901 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.760954 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.761555 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.762135 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.762237 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.762376 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.762488 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:40Z","lastTransitionTime":"2025-12-02T11:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.773270 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.791270 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.806370 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.828263 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef2
6958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a054bb9199392e567962d2c6650a94391b73feb4339b303f1cdf046ed66390b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.864609 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 
02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.864651 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.864661 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.864678 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.864713 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:40Z","lastTransitionTime":"2025-12-02T11:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.895820 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.917683 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9
864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.934763 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f5407555
8ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.952116 4721 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.964710 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.967824 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.967846 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.967854 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.967866 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.967875 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:40Z","lastTransitionTime":"2025-12-02T11:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.979119 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.989708 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:40 crc kubenswrapper[4721]: I1202 11:51:40.998589 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:51:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.008085 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:41Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.020455 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:41Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.030886 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:41Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.048146 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:41Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.065546 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d770
19661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a054bb9199392e567962d2c6650a94391b73feb4339b303f1cdf046ed66390b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\
\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:41Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.069694 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 
11:51:41.069730 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.069741 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.069756 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.069765 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:41Z","lastTransitionTime":"2025-12-02T11:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.071706 4721 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.077536 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:41Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.089724 4721 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:41Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.171660 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.171698 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.171707 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.171724 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.171734 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:41Z","lastTransitionTime":"2025-12-02T11:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.273271 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.273308 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.273317 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.273331 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.273357 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:41Z","lastTransitionTime":"2025-12-02T11:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.375836 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.375869 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.375878 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.375891 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.375901 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:41Z","lastTransitionTime":"2025-12-02T11:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.478039 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.478071 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.478078 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.478092 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.478100 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:41Z","lastTransitionTime":"2025-12-02T11:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.580203 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.580240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.580268 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.580285 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.580295 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:41Z","lastTransitionTime":"2025-12-02T11:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.682251 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.682288 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.682298 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.682314 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.682324 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:41Z","lastTransitionTime":"2025-12-02T11:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.787504 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.787555 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.787567 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.787620 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.787633 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:41Z","lastTransitionTime":"2025-12-02T11:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.838916 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:41 crc kubenswrapper[4721]: E1202 11:51:41.839025 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.839132 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:41 crc kubenswrapper[4721]: E1202 11:51:41.839225 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.839140 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:41 crc kubenswrapper[4721]: E1202 11:51:41.839329 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.889407 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.889436 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.889444 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.889458 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.889466 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:41Z","lastTransitionTime":"2025-12-02T11:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.991663 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.991713 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.991722 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.991735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:41 crc kubenswrapper[4721]: I1202 11:51:41.991744 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:41Z","lastTransitionTime":"2025-12-02T11:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.075992 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovnkube-controller/0.log" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.077949 4721 generic.go:334] "Generic (PLEG): container finished" podID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerID="6a054bb9199392e567962d2c6650a94391b73feb4339b303f1cdf046ed66390b" exitCode=1 Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.077987 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerDied","Data":"6a054bb9199392e567962d2c6650a94391b73feb4339b303f1cdf046ed66390b"} Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.078837 4721 scope.go:117] "RemoveContainer" containerID="6a054bb9199392e567962d2c6650a94391b73feb4339b303f1cdf046ed66390b" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.092462 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-c
ert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:42Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.093867 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.093898 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.093927 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.093941 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.093953 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:42Z","lastTransitionTime":"2025-12-02T11:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.103370 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:42Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.122478 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9
864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:42Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.136378 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f5407555
8ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:42Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.145134 4721 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:42Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.155647 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:42Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.165630 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:42Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.177030 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:42Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.190981 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:42Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.196063 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.196113 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.196128 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.196149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.196177 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:42Z","lastTransitionTime":"2025-12-02T11:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.205756 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:42Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.216507 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:42Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.242932 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a054bb9199392e567962d2c6650a94391b73feb
4339b303f1cdf046ed66390b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a054bb9199392e567962d2c6650a94391b73feb4339b303f1cdf046ed66390b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:42Z\\\",\\\"message\\\":\\\"s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.379798 6020 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.379788 6020 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 11:51:41.379973 6020 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.380084 6020 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.380486 6020 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.382220 6020 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 11:51:41.382250 6020 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 11:51:41.382268 6020 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 11:51:41.382280 6020 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 11:51:41.382304 6020 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 11:51:41.382303 6020 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 11:51:41.382289 6020 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:42Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.254762 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:42Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.267228 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:42Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.278546 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:42Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.298119 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.298149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.298158 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.298173 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.298182 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:42Z","lastTransitionTime":"2025-12-02T11:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.401059 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.401100 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.401111 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.401128 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.401142 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:42Z","lastTransitionTime":"2025-12-02T11:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.502880 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.502920 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.502929 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.502946 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.502955 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:42Z","lastTransitionTime":"2025-12-02T11:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.604795 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.604830 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.604838 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.604851 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.604861 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:42Z","lastTransitionTime":"2025-12-02T11:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.706814 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.706846 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.706855 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.706867 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.706877 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:42Z","lastTransitionTime":"2025-12-02T11:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.809026 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.809058 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.809067 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.809083 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.809093 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:42Z","lastTransitionTime":"2025-12-02T11:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.910976 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.911002 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.911010 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.911023 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:42 crc kubenswrapper[4721]: I1202 11:51:42.911031 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:42Z","lastTransitionTime":"2025-12-02T11:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.013748 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.013793 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.013807 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.013822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.013835 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:43Z","lastTransitionTime":"2025-12-02T11:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.082160 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovnkube-controller/1.log" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.083087 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovnkube-controller/0.log" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.085558 4721 generic.go:334] "Generic (PLEG): container finished" podID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerID="abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe" exitCode=1 Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.085610 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerDied","Data":"abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe"} Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.085657 4721 scope.go:117] "RemoveContainer" containerID="6a054bb9199392e567962d2c6650a94391b73feb4339b303f1cdf046ed66390b" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.086568 4721 scope.go:117] "RemoveContainer" containerID="abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe" Dec 02 11:51:43 crc kubenswrapper[4721]: E1202 11:51:43.086782 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.101846 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.112396 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.115791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.115814 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.115822 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.115836 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.115846 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:43Z","lastTransitionTime":"2025-12-02T11:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.122780 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.141112 4721 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]
},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cr
i-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.154319 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.163128 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.172930 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.183558 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.192824 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.203839 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.214875 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.218187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.218218 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.218227 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.218241 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.218249 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:43Z","lastTransitionTime":"2025-12-02T11:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.232609 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a054bb9199392e567962d2c6650a94391b73feb4339b303f1cdf046ed66390b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:42Z\\\",\\\"message\\\":\\\"s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.379798 6020 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.379788 6020 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 11:51:41.379973 6020 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.380084 6020 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.380486 6020 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.382220 6020 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 11:51:41.382250 6020 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 11:51:41.382268 6020 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 11:51:41.382280 6020 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 11:51:41.382304 6020 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 11:51:41.382303 6020 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 11:51:41.382289 6020 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:42Z\\\",\\\"message\\\":\\\"40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:42.791167 6138 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1202 11:51:42.791179 6138 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1202 11:51:42.791193 6138 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 11:51:42.791202 6138 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1202 11:51:42.791227 6138 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}\\\\nI1202 11:51:42.791240 6138 services_controller.go:360] Finished syncing service downloads on namespace openshift-console for network=default : 3.193901ms\\\\nI1202 11:51:42.791250 6138 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nF1202 11:51:42.791253 6138 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.242449 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.252011 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.262176 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.320472 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.320503 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.320513 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.320527 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.320537 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:43Z","lastTransitionTime":"2025-12-02T11:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.423061 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.423104 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.423116 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.423136 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.423180 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:43Z","lastTransitionTime":"2025-12-02T11:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.500142 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s"] Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.500780 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.502720 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.504885 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.514042 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.525198 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.525253 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.525265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.525284 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.525298 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:43Z","lastTransitionTime":"2025-12-02T11:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.525742 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.539705 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.557843 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9
864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.572693 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f5407555
8ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.584526 4721 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.587960 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bddfc4d8-9d79-4b83-9b40-38463890e808-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-v2k4s\" (UID: 
\"bddfc4d8-9d79-4b83-9b40-38463890e808\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.588001 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8fbl\" (UniqueName: \"kubernetes.io/projected/bddfc4d8-9d79-4b83-9b40-38463890e808-kube-api-access-d8fbl\") pod \"ovnkube-control-plane-749d76644c-v2k4s\" (UID: \"bddfc4d8-9d79-4b83-9b40-38463890e808\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.588037 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bddfc4d8-9d79-4b83-9b40-38463890e808-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-v2k4s\" (UID: \"bddfc4d8-9d79-4b83-9b40-38463890e808\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.588180 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bddfc4d8-9d79-4b83-9b40-38463890e808-env-overrides\") pod \"ovnkube-control-plane-749d76644c-v2k4s\" (UID: \"bddfc4d8-9d79-4b83-9b40-38463890e808\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.594612 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\
\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.604167 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube
-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.618459 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":
\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b
5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.626880 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.626908 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.626920 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.626935 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.626945 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:43Z","lastTransitionTime":"2025-12-02T11:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.629763 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.640229 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.655039 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.665873 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.678596 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.689314 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bddfc4d8-9d79-4b83-9b40-38463890e808-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-v2k4s\" (UID: \"bddfc4d8-9d79-4b83-9b40-38463890e808\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.689371 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8fbl\" (UniqueName: \"kubernetes.io/projected/bddfc4d8-9d79-4b83-9b40-38463890e808-kube-api-access-d8fbl\") pod \"ovnkube-control-plane-749d76644c-v2k4s\" (UID: \"bddfc4d8-9d79-4b83-9b40-38463890e808\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.689397 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bddfc4d8-9d79-4b83-9b40-38463890e808-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-v2k4s\" (UID: \"bddfc4d8-9d79-4b83-9b40-38463890e808\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.689425 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bddfc4d8-9d79-4b83-9b40-38463890e808-env-overrides\") pod \"ovnkube-control-plane-749d76644c-v2k4s\" (UID: \"bddfc4d8-9d79-4b83-9b40-38463890e808\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.689881 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bddfc4d8-9d79-4b83-9b40-38463890e808-env-overrides\") pod \"ovnkube-control-plane-749d76644c-v2k4s\" (UID: \"bddfc4d8-9d79-4b83-9b40-38463890e808\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.690042 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bddfc4d8-9d79-4b83-9b40-38463890e808-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-v2k4s\" (UID: \"bddfc4d8-9d79-4b83-9b40-38463890e808\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.694961 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bddfc4d8-9d79-4b83-9b40-38463890e808-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-v2k4s\" (UID: \"bddfc4d8-9d79-4b83-9b40-38463890e808\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.700814 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a054bb9199392e567962d2c6650a94391b73feb4339b303f1cdf046ed66390b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:42Z\\\",\\\"message\\\":\\\"s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.379798 6020 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.379788 6020 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 11:51:41.379973 6020 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.380084 6020 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.380486 6020 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.382220 6020 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 11:51:41.382250 6020 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 11:51:41.382268 6020 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 11:51:41.382280 6020 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 11:51:41.382304 6020 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 11:51:41.382303 6020 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 11:51:41.382289 6020 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:42Z\\\",\\\"message\\\":\\\"40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:42.791167 6138 obj_retry.go:365] Adding 
new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1202 11:51:42.791179 6138 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1202 11:51:42.791193 6138 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 11:51:42.791202 6138 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1202 11:51:42.791227 6138 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}\\\\nI1202 11:51:42.791240 6138 services_controller.go:360] Finished syncing service downloads on namespace openshift-console for network=default : 3.193901ms\\\\nI1202 11:51:42.791250 6138 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nF1202 11:51:42.791253 6138 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.705836 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8fbl\" (UniqueName: \"kubernetes.io/projected/bddfc4d8-9d79-4b83-9b40-38463890e808-kube-api-access-d8fbl\") pod \"ovnkube-control-plane-749d76644c-v2k4s\" (UID: \"bddfc4d8-9d79-4b83-9b40-38463890e808\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.712850 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:43Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.729359 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:43 crc 
kubenswrapper[4721]: I1202 11:51:43.729624 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.729697 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.729804 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.729893 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:43Z","lastTransitionTime":"2025-12-02T11:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.815800 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" Dec 02 11:51:43 crc kubenswrapper[4721]: W1202 11:51:43.825868 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbddfc4d8_9d79_4b83_9b40_38463890e808.slice/crio-4e3ce5c3274975b2a89ee7dc8d9c5df0165a366e62dd87f31b1e2c56b9a4a8fb WatchSource:0}: Error finding container 4e3ce5c3274975b2a89ee7dc8d9c5df0165a366e62dd87f31b1e2c56b9a4a8fb: Status 404 returned error can't find the container with id 4e3ce5c3274975b2a89ee7dc8d9c5df0165a366e62dd87f31b1e2c56b9a4a8fb Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.831450 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.831477 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.831485 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.831497 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.831506 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:43Z","lastTransitionTime":"2025-12-02T11:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.839110 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.839140 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.839159 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:43 crc kubenswrapper[4721]: E1202 11:51:43.839230 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:51:43 crc kubenswrapper[4721]: E1202 11:51:43.839273 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:51:43 crc kubenswrapper[4721]: E1202 11:51:43.839316 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.933537 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.933564 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.933572 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.933585 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:43 crc kubenswrapper[4721]: I1202 11:51:43.933595 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:43Z","lastTransitionTime":"2025-12-02T11:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.035086 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.035119 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.035129 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.035142 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.035150 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:44Z","lastTransitionTime":"2025-12-02T11:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.090446 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovnkube-controller/1.log" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.094960 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" event={"ID":"bddfc4d8-9d79-4b83-9b40-38463890e808","Type":"ContainerStarted","Data":"4e3ce5c3274975b2a89ee7dc8d9c5df0165a366e62dd87f31b1e2c56b9a4a8fb"} Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.136920 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.136948 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.136956 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.136967 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.136976 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:44Z","lastTransitionTime":"2025-12-02T11:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.238999 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.239043 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.239057 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.239073 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.239083 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:44Z","lastTransitionTime":"2025-12-02T11:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.341809 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.341839 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.341847 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.341861 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.341869 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:44Z","lastTransitionTime":"2025-12-02T11:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.444598 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.444629 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.444637 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.444651 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.444660 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:44Z","lastTransitionTime":"2025-12-02T11:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.547052 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.547083 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.547091 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.547103 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.547111 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:44Z","lastTransitionTime":"2025-12-02T11:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.565456 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-qf9jp"] Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.565857 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:51:44 crc kubenswrapper[4721]: E1202 11:51:44.565910 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.584153 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.601034 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.614377 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.635220 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9
864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.648878 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f5407555
8ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.649000 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.649195 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.649204 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.649216 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.649225 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:44Z","lastTransitionTime":"2025-12-02T11:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.660741 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.671651 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 
2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.681793 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.692306 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.698090 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs\") pod \"network-metrics-daemon-qf9jp\" (UID: \"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\") " pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.698145 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4s44b\" (UniqueName: \"kubernetes.io/projected/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-kube-api-access-4s44b\") pod \"network-metrics-daemon-qf9jp\" (UID: \"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\") " pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.703766 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.717811 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.730052 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.743962 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.751434 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.751597 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.751665 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.751730 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.751788 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:44Z","lastTransitionTime":"2025-12-02T11:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.754146 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.768453 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 
2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.778778 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:51:44Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.799229 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4s44b\" (UniqueName: \"kubernetes.io/projected/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-kube-api-access-4s44b\") pod \"network-metrics-daemon-qf9jp\" (UID: \"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\") " pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.799465 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs\") pod \"network-metrics-daemon-qf9jp\" (UID: \"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\") " pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:51:44 crc kubenswrapper[4721]: E1202 11:51:44.799589 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 11:51:44 crc kubenswrapper[4721]: E1202 11:51:44.799658 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs podName:c1b39d16-c46b-40dc-a2a6-600f96eb93f7 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:45.299639717 +0000 UTC m=+35.038190066 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs") pod "network-metrics-daemon-qf9jp" (UID: "c1b39d16-c46b-40dc-a2a6-600f96eb93f7") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.806954 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://abdebcc3a4e0d6006401bb7593699a39a2f3c3ed
ec2597177bdf3e4b15ae47fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a054bb9199392e567962d2c6650a94391b73feb4339b303f1cdf046ed66390b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:42Z\\\",\\\"message\\\":\\\"s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.379798 6020 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.379788 6020 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 11:51:41.379973 6020 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.380084 6020 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.380486 6020 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.382220 6020 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 11:51:41.382250 6020 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 11:51:41.382268 6020 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 11:51:41.382280 6020 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 11:51:41.382304 6020 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 11:51:41.382303 6020 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 11:51:41.382289 6020 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:42Z\\\",\\\"message\\\":\\\"40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:42.791167 6138 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1202 11:51:42.791179 6138 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1202 11:51:42.791193 6138 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 11:51:42.791202 6138 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1202 11:51:42.791227 6138 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}\\\\nI1202 11:51:42.791240 6138 services_controller.go:360] Finished syncing service downloads on namespace openshift-console for network=default : 3.193901ms\\\\nI1202 11:51:42.791250 6138 
services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nF1202 11:51:42.791253 6138 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\
\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:44Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.825076 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4s44b\" (UniqueName: \"kubernetes.io/projected/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-kube-api-access-4s44b\") pod \"network-metrics-daemon-qf9jp\" (UID: \"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\") " pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.853481 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.853550 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.853573 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.853603 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.853621 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:44Z","lastTransitionTime":"2025-12-02T11:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.956255 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.956299 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.956308 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.956323 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:44 crc kubenswrapper[4721]: I1202 11:51:44.956332 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:44Z","lastTransitionTime":"2025-12-02T11:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.059264 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.059314 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.059324 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.059363 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.059373 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:45Z","lastTransitionTime":"2025-12-02T11:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.100061 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" event={"ID":"bddfc4d8-9d79-4b83-9b40-38463890e808","Type":"ContainerStarted","Data":"962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31"} Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.100107 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" event={"ID":"bddfc4d8-9d79-4b83-9b40-38463890e808","Type":"ContainerStarted","Data":"23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc"} Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.113067 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.128758 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.139007 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.150433 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.161502 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.162174 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.162211 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.162222 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.162238 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.162251 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:45Z","lastTransitionTime":"2025-12-02T11:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.180680 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.191861 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.202521 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.214506 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.227093 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.235114 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.246229 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.257650 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.264137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.264174 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.264187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.264205 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.264217 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:45Z","lastTransitionTime":"2025-12-02T11:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.275167 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a054bb9199392e567962d2c6650a94391b73feb4339b303f1cdf046ed66390b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:42Z\\\",\\\"message\\\":\\\"s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.379798 6020 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.379788 6020 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 11:51:41.379973 6020 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.380084 6020 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.380486 6020 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.382220 6020 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 11:51:41.382250 6020 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 11:51:41.382268 6020 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 11:51:41.382280 6020 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 11:51:41.382304 6020 handler.go:208] Removed *v1.Node event handler 7\\\\nI1202 11:51:41.382303 6020 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 11:51:41.382289 6020 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:42Z\\\",\\\"message\\\":\\\"40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:42.791167 6138 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1202 11:51:42.791179 6138 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1202 11:51:42.791193 6138 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 11:51:42.791202 6138 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1202 11:51:42.791227 6138 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}\\\\nI1202 11:51:42.791240 6138 services_controller.go:360] Finished syncing service downloads on namespace openshift-console for network=default : 3.193901ms\\\\nI1202 11:51:42.791250 6138 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nF1202 11:51:42.791253 6138 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.286367 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.297478 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.304634 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs\") pod \"network-metrics-daemon-qf9jp\" (UID: \"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\") " pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.304756 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.304803 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs podName:c1b39d16-c46b-40dc-a2a6-600f96eb93f7 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:46.304790638 +0000 UTC m=+36.043340987 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs") pod "network-metrics-daemon-qf9jp" (UID: "c1b39d16-c46b-40dc-a2a6-600f96eb93f7") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.308471 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:45Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.366232 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.366264 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.366273 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.366286 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:45 crc kubenswrapper[4721]: 
I1202 11:51:45.366294 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:45Z","lastTransitionTime":"2025-12-02T11:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.468444 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.468474 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.468483 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.468494 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.468503 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:45Z","lastTransitionTime":"2025-12-02T11:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.571137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.571176 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.571190 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.571208 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.571220 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:45Z","lastTransitionTime":"2025-12-02T11:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.607662 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.607784 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.607823 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:52:01.60779375 +0000 UTC m=+51.346344109 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.607869 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.607925 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.607926 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.607958 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.608019 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-02 11:52:01.607997667 +0000 UTC m=+51.346548016 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.608115 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.608133 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.608147 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.608187 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 11:52:01.608176842 +0000 UTC m=+51.346727211 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.608241 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.608261 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.608271 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.608299 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 11:52:01.608290726 +0000 UTC m=+51.346841085 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.608369 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.608435 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 11:52:01.6084199 +0000 UTC m=+51.346970239 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.673881 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.673948 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.673973 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.673997 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.674012 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:45Z","lastTransitionTime":"2025-12-02T11:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.776763 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.776807 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.776817 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.776838 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.776849 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:45Z","lastTransitionTime":"2025-12-02T11:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.839599 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.839625 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.839673 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.839731 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.839791 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.839958 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.840069 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:51:45 crc kubenswrapper[4721]: E1202 11:51:45.840162 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.879925 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.879969 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.879985 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.880011 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.880030 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:45Z","lastTransitionTime":"2025-12-02T11:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.981851 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.981923 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.981941 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.981965 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:45 crc kubenswrapper[4721]: I1202 11:51:45.981983 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:45Z","lastTransitionTime":"2025-12-02T11:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.084071 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.084109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.084120 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.084137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.084149 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:46Z","lastTransitionTime":"2025-12-02T11:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.186716 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.186772 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.186789 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.186807 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.186818 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:46Z","lastTransitionTime":"2025-12-02T11:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.289437 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.289523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.289540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.289564 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.289580 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:46Z","lastTransitionTime":"2025-12-02T11:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.313993 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs\") pod \"network-metrics-daemon-qf9jp\" (UID: \"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\") " pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:51:46 crc kubenswrapper[4721]: E1202 11:51:46.314121 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 11:51:46 crc kubenswrapper[4721]: E1202 11:51:46.314180 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs podName:c1b39d16-c46b-40dc-a2a6-600f96eb93f7 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:48.314165311 +0000 UTC m=+38.052715660 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs") pod "network-metrics-daemon-qf9jp" (UID: "c1b39d16-c46b-40dc-a2a6-600f96eb93f7") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.391461 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.391497 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.391508 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.391523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.391533 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:46Z","lastTransitionTime":"2025-12-02T11:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.493250 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.493520 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.493583 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.493645 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.493727 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:46Z","lastTransitionTime":"2025-12-02T11:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.596004 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.596064 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.596087 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.596114 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.596134 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:46Z","lastTransitionTime":"2025-12-02T11:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.698277 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.698605 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.698627 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.698651 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.698668 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:46Z","lastTransitionTime":"2025-12-02T11:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.801847 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.801911 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.801922 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.801935 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.801971 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:46Z","lastTransitionTime":"2025-12-02T11:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.904303 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.904333 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.904358 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.904373 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:46 crc kubenswrapper[4721]: I1202 11:51:46.904385 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:46Z","lastTransitionTime":"2025-12-02T11:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.006493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.006531 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.006541 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.006558 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.006572 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:47Z","lastTransitionTime":"2025-12-02T11:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.108428 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.108485 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.108505 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.108527 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.108545 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:47Z","lastTransitionTime":"2025-12-02T11:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.211613 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.211647 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.211657 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.211670 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.211680 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:47Z","lastTransitionTime":"2025-12-02T11:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.315012 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.315050 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.315058 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.315071 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.315079 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:47Z","lastTransitionTime":"2025-12-02T11:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.416927 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.416966 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.416974 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.417028 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.417041 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:47Z","lastTransitionTime":"2025-12-02T11:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.519856 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.519896 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.519907 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.519924 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.519935 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:47Z","lastTransitionTime":"2025-12-02T11:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.622475 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.622527 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.622539 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.622555 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.622568 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:47Z","lastTransitionTime":"2025-12-02T11:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.724955 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.725025 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.725042 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.725065 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.725084 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:47Z","lastTransitionTime":"2025-12-02T11:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.827808 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.827893 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.827928 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.827959 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.827981 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:47Z","lastTransitionTime":"2025-12-02T11:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.839678 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.839712 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.839810 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:47 crc kubenswrapper[4721]: E1202 11:51:47.840012 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.840061 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:47 crc kubenswrapper[4721]: E1202 11:51:47.840200 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:51:47 crc kubenswrapper[4721]: E1202 11:51:47.840402 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:51:47 crc kubenswrapper[4721]: E1202 11:51:47.840296 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.930561 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.930617 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.930628 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.930647 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:47 crc kubenswrapper[4721]: I1202 11:51:47.930658 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:47Z","lastTransitionTime":"2025-12-02T11:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.033290 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.033331 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.033352 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.033369 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.033377 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:48Z","lastTransitionTime":"2025-12-02T11:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.135713 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.135761 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.135771 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.135788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.135825 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:48Z","lastTransitionTime":"2025-12-02T11:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.238034 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.238117 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.238148 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.238162 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.238173 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:48Z","lastTransitionTime":"2025-12-02T11:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.333542 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs\") pod \"network-metrics-daemon-qf9jp\" (UID: \"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\") " pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:51:48 crc kubenswrapper[4721]: E1202 11:51:48.333700 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 11:51:48 crc kubenswrapper[4721]: E1202 11:51:48.333885 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs podName:c1b39d16-c46b-40dc-a2a6-600f96eb93f7 nodeName:}" failed. No retries permitted until 2025-12-02 11:51:52.333864077 +0000 UTC m=+42.072414466 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs") pod "network-metrics-daemon-qf9jp" (UID: "c1b39d16-c46b-40dc-a2a6-600f96eb93f7") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.341190 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.341260 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.341284 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.341312 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.341332 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:48Z","lastTransitionTime":"2025-12-02T11:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.443573 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.443626 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.443643 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.443668 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.443684 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:48Z","lastTransitionTime":"2025-12-02T11:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.546308 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.546561 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.546950 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.547031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.547467 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:48Z","lastTransitionTime":"2025-12-02T11:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.653914 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.654098 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.654117 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.654135 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.654146 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:48Z","lastTransitionTime":"2025-12-02T11:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.757265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.757320 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.757338 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.757389 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.757405 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:48Z","lastTransitionTime":"2025-12-02T11:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.859268 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.859337 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.859389 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.859418 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.859439 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:48Z","lastTransitionTime":"2025-12-02T11:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.962498 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.962556 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.962572 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.962595 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:48 crc kubenswrapper[4721]: I1202 11:51:48.962614 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:48Z","lastTransitionTime":"2025-12-02T11:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.064729 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.064772 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.064781 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.064797 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.064809 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:49Z","lastTransitionTime":"2025-12-02T11:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.166964 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.167022 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.167045 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.167077 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.167101 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:49Z","lastTransitionTime":"2025-12-02T11:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.269941 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.269982 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.269992 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.270007 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.270017 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:49Z","lastTransitionTime":"2025-12-02T11:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.371595 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.371628 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.371637 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.371649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.371659 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:49Z","lastTransitionTime":"2025-12-02T11:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.473894 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.473962 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.473978 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.474004 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.474022 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:49Z","lastTransitionTime":"2025-12-02T11:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.534846 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.534912 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.534934 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.534957 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.534975 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:49Z","lastTransitionTime":"2025-12-02T11:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:49 crc kubenswrapper[4721]: E1202 11:51:49.553523 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:49Z is after 
2025-08-24T17:21:41Z" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.557272 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.557321 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.557338 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.557409 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.557431 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:49Z","lastTransitionTime":"2025-12-02T11:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:49 crc kubenswrapper[4721]: E1202 11:51:49.570040 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:49Z is after 
2025-08-24T17:21:41Z" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.574140 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.574173 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.574181 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.574196 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.574206 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:49Z","lastTransitionTime":"2025-12-02T11:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:49 crc kubenswrapper[4721]: E1202 11:51:49.592892 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:49Z is after 
2025-08-24T17:21:41Z" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.597187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.597216 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.597226 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.597238 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.597247 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:49Z","lastTransitionTime":"2025-12-02T11:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:49 crc kubenswrapper[4721]: E1202 11:51:49.614486 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:49Z is after 
2025-08-24T17:21:41Z" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.618057 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.618108 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.618123 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.618140 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.618153 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:49Z","lastTransitionTime":"2025-12-02T11:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:49 crc kubenswrapper[4721]: E1202 11:51:49.635925 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:49Z is after 
2025-08-24T17:21:41Z" Dec 02 11:51:49 crc kubenswrapper[4721]: E1202 11:51:49.636097 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.637695 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.637769 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.637793 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.637822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.637843 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:49Z","lastTransitionTime":"2025-12-02T11:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.740838 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.740875 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.740884 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.740898 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.740907 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:49Z","lastTransitionTime":"2025-12-02T11:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.839705 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.839766 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.839720 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:49 crc kubenswrapper[4721]: E1202 11:51:49.839861 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.839876 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:49 crc kubenswrapper[4721]: E1202 11:51:49.839933 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:51:49 crc kubenswrapper[4721]: E1202 11:51:49.840029 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:51:49 crc kubenswrapper[4721]: E1202 11:51:49.840165 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.844073 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.844122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.844141 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.844164 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.844180 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:49Z","lastTransitionTime":"2025-12-02T11:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.947170 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.947233 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.947246 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.947265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:49 crc kubenswrapper[4721]: I1202 11:51:49.947278 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:49Z","lastTransitionTime":"2025-12-02T11:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.050910 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.051019 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.051041 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.051070 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.051092 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:50Z","lastTransitionTime":"2025-12-02T11:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.154083 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.154145 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.154156 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.154176 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.154188 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:50Z","lastTransitionTime":"2025-12-02T11:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.262625 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.262669 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.262682 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.262702 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.262719 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:50Z","lastTransitionTime":"2025-12-02T11:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.366409 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.366471 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.366482 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.366501 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.366514 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:50Z","lastTransitionTime":"2025-12-02T11:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.469149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.469212 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.469228 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.469252 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.469274 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:50Z","lastTransitionTime":"2025-12-02T11:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.571849 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.571910 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.571921 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.571942 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.571954 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:50Z","lastTransitionTime":"2025-12-02T11:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.675531 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.675605 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.675620 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.675638 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.675650 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:50Z","lastTransitionTime":"2025-12-02T11:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.778201 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.778555 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.778684 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.778765 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.778843 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:50Z","lastTransitionTime":"2025-12-02T11:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.855644 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.873092 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9
864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.881659 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.881885 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.882230 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.882329 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.882456 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:50Z","lastTransitionTime":"2025-12-02T11:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.896864 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.915556 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.963771 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.992586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.992631 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.992642 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.992660 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.992673 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:50Z","lastTransitionTime":"2025-12-02T11:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:50 crc kubenswrapper[4721]: I1202 11:51:50.996625 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.016058 4721 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:51Z is after 
2025-08-24T17:21:41Z" Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.032894 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\
\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:51Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.048593 4721 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f495
9f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:51Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.061308 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:51Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.072396 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:51:51Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.085317 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:51Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.095526 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.095571 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.095581 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.095599 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.095609 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:51Z","lastTransitionTime":"2025-12-02T11:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.105432 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:51Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.132529 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12
-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/
\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6a054bb9199392e567962d2c6650a94391b73feb4339b303f1cdf046ed66390b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:42Z\\\",\\\"message\\\":\\\"s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.379798 6020 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.379788 6020 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 11:51:41.379973 6020 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.380084 6020 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.380486 6020 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1202 11:51:41.382220 6020 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 11:51:41.382250 6020 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 11:51:41.382268 6020 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 11:51:41.382280 6020 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 11:51:41.382304 6020 handler.go:208] Removed *v1.Node event handler 
7\\\\nI1202 11:51:41.382303 6020 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 11:51:41.382289 6020 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:42Z\\\",\\\"message\\\":\\\"40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:42.791167 6138 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1202 11:51:42.791179 6138 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1202 11:51:42.791193 6138 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 11:51:42.791202 6138 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1202 11:51:42.791227 6138 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}\\\\nI1202 11:51:42.791240 6138 services_controller.go:360] Finished syncing service downloads on namespace openshift-console for network=default : 3.193901ms\\\\nI1202 11:51:42.791250 6138 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nF1202 11:51:42.791253 6138 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:51Z is after 2025-08-24T17:21:41Z"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.149101 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:51Z is after 2025-08-24T17:21:41Z"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.167797 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:51Z is after 2025-08-24T17:21:41Z"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.182263 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:51Z is after 2025-08-24T17:21:41Z"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.199419 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.199607 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.199695 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.199772 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.199878 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:51Z","lastTransitionTime":"2025-12-02T11:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.302985 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.303265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.303326 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.303411 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.303467 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:51Z","lastTransitionTime":"2025-12-02T11:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.406107 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.407155 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.407313 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.407470 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.407639 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:51Z","lastTransitionTime":"2025-12-02T11:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.510926 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.510983 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.510994 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.511009 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.511018 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:51Z","lastTransitionTime":"2025-12-02T11:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.614155 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.614475 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.614589 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.614664 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.614722 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:51Z","lastTransitionTime":"2025-12-02T11:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.716598 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.716632 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.716640 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.716655 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.716666 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:51Z","lastTransitionTime":"2025-12-02T11:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.819241 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.819289 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.819297 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.819313 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.819324 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:51Z","lastTransitionTime":"2025-12-02T11:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.839663 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.839721 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.839756 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.839781 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:51:51 crc kubenswrapper[4721]: E1202 11:51:51.839896 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:51:51 crc kubenswrapper[4721]: E1202 11:51:51.840161 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:51:51 crc kubenswrapper[4721]: E1202 11:51:51.840291 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:51:51 crc kubenswrapper[4721]: E1202 11:51:51.840417 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.922269 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.922591 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.922658 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.922722 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:51 crc kubenswrapper[4721]: I1202 11:51:51.922797 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:51Z","lastTransitionTime":"2025-12-02T11:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.025471 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.025530 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.025544 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.025563 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.025577 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:52Z","lastTransitionTime":"2025-12-02T11:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.127734 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.127770 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.127778 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.127791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.127799 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:52Z","lastTransitionTime":"2025-12-02T11:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.231016 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.231063 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.231075 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.231094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.231106 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:52Z","lastTransitionTime":"2025-12-02T11:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.333275 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.333320 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.333332 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.333370 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.333382 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:52Z","lastTransitionTime":"2025-12-02T11:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.378708 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs\") pod \"network-metrics-daemon-qf9jp\" (UID: \"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\") " pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:51:52 crc kubenswrapper[4721]: E1202 11:51:52.378851 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 02 11:51:52 crc kubenswrapper[4721]: E1202 11:51:52.378948 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs podName:c1b39d16-c46b-40dc-a2a6-600f96eb93f7 nodeName:}" failed. No retries permitted until 2025-12-02 11:52:00.37892506 +0000 UTC m=+50.117475429 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs") pod "network-metrics-daemon-qf9jp" (UID: "c1b39d16-c46b-40dc-a2a6-600f96eb93f7") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.435640 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.435672 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.435680 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.435692 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.435700 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:52Z","lastTransitionTime":"2025-12-02T11:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.538537 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.538603 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.538620 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.538645 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.538661 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:52Z","lastTransitionTime":"2025-12-02T11:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.641019 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.641078 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.641096 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.641116 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.641130 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:52Z","lastTransitionTime":"2025-12-02T11:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.742928 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.742962 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.742970 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.742983 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.742991 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:52Z","lastTransitionTime":"2025-12-02T11:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.845333 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.845453 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.845483 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.845512 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.845532 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:52Z","lastTransitionTime":"2025-12-02T11:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.948001 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.948086 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.948110 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.948144 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:52 crc kubenswrapper[4721]: I1202 11:51:52.948172 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:52Z","lastTransitionTime":"2025-12-02T11:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.051320 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.051375 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.051384 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.051398 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.051409 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:53Z","lastTransitionTime":"2025-12-02T11:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.154735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.154793 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.154810 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.154832 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.154847 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:53Z","lastTransitionTime":"2025-12-02T11:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.258324 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.258413 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.258426 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.258447 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.258463 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:53Z","lastTransitionTime":"2025-12-02T11:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.362030 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.362099 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.362120 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.362149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.362174 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:53Z","lastTransitionTime":"2025-12-02T11:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.464826 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.464857 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.464865 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.464877 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.464893 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:53Z","lastTransitionTime":"2025-12-02T11:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.568238 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.568297 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.568315 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.568368 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.568386 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:53Z","lastTransitionTime":"2025-12-02T11:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.671013 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.671075 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.671093 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.671116 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.671133 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:53Z","lastTransitionTime":"2025-12-02T11:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.773516 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.773581 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.773592 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.773608 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.773618 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:53Z","lastTransitionTime":"2025-12-02T11:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.839269 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.839423 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.839479 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.839485 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:51:53 crc kubenswrapper[4721]: E1202 11:51:53.839441 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:51:53 crc kubenswrapper[4721]: E1202 11:51:53.839624 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:51:53 crc kubenswrapper[4721]: E1202 11:51:53.839831 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:51:53 crc kubenswrapper[4721]: E1202 11:51:53.839901 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.875845 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.875900 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.875912 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.875930 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.875942 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:53Z","lastTransitionTime":"2025-12-02T11:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.978698 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.978740 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.978749 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.978761 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:53 crc kubenswrapper[4721]: I1202 11:51:53.978769 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:53Z","lastTransitionTime":"2025-12-02T11:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.081602 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.081636 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.081644 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.081656 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.081664 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:54Z","lastTransitionTime":"2025-12-02T11:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.184113 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.184140 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.184149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.184160 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.184168 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:54Z","lastTransitionTime":"2025-12-02T11:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.286523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.286597 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.286626 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.286655 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.286692 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:54Z","lastTransitionTime":"2025-12-02T11:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.389200 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.389237 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.389246 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.389259 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.389268 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:54Z","lastTransitionTime":"2025-12-02T11:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.491770 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.491811 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.491822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.491840 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.491852 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:54Z","lastTransitionTime":"2025-12-02T11:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.593834 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.593873 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.593883 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.593898 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.593909 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:54Z","lastTransitionTime":"2025-12-02T11:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.696228 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.696265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.696273 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.696286 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.696295 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:54Z","lastTransitionTime":"2025-12-02T11:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.799109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.799155 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.799165 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.799180 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.799191 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:54Z","lastTransitionTime":"2025-12-02T11:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.902028 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.902085 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.902102 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.902125 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:54 crc kubenswrapper[4721]: I1202 11:51:54.902141 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:54Z","lastTransitionTime":"2025-12-02T11:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.004819 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.004880 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.004901 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.004924 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.004966 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:55Z","lastTransitionTime":"2025-12-02T11:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.106537 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.106584 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.106597 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.106614 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.106627 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:55Z","lastTransitionTime":"2025-12-02T11:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.209983 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.210035 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.210051 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.210071 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.210086 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:55Z","lastTransitionTime":"2025-12-02T11:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.312337 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.312399 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.312408 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.312423 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.312436 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:55Z","lastTransitionTime":"2025-12-02T11:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.415202 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.415233 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.415241 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.415254 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.415262 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:55Z","lastTransitionTime":"2025-12-02T11:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.517912 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.517991 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.518013 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.518044 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.518066 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:55Z","lastTransitionTime":"2025-12-02T11:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.620222 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.620279 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.620288 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.620304 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.620312 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:55Z","lastTransitionTime":"2025-12-02T11:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.723401 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.723444 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.723453 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.723466 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.723475 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:55Z","lastTransitionTime":"2025-12-02T11:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.826211 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.826271 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.826291 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.826318 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.826337 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:55Z","lastTransitionTime":"2025-12-02T11:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.839555 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.839553 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.839613 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.839613 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:55 crc kubenswrapper[4721]: E1202 11:51:55.839776 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:51:55 crc kubenswrapper[4721]: E1202 11:51:55.839975 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:51:55 crc kubenswrapper[4721]: E1202 11:51:55.840104 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:51:55 crc kubenswrapper[4721]: E1202 11:51:55.840221 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.841308 4721 scope.go:117] "RemoveContainer" containerID="abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.858242 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:55Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.871738 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:55Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.893961 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9
864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:55Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.914192 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f5407555
8ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:55Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.928721 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.928757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.928765 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.928779 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.928788 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:55Z","lastTransitionTime":"2025-12-02T11:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.931251 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:55Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.944200 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:55Z is after 
2025-08-24T17:21:41Z" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.956244 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:55Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.967250 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126
.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:55Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:55 crc kubenswrapper[4721]: I1202 11:51:55.985776 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\
\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2
eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11
:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:55Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.002477 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17a
b95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.015801 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.025537 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.030501 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.030531 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.030543 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.030558 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.030568 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:56Z","lastTransitionTime":"2025-12-02T11:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.038454 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.063542 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://abdebcc3a4e0d6006401bb7593699a39a2f3c3ed
ec2597177bdf3e4b15ae47fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:42Z\\\",\\\"message\\\":\\\"40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:42.791167 6138 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1202 11:51:42.791179 6138 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1202 11:51:42.791193 6138 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 11:51:42.791202 6138 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1202 11:51:42.791227 6138 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}\\\\nI1202 11:51:42.791240 6138 services_controller.go:360] Finished syncing service downloads on namespace openshift-console for network=default : 3.193901ms\\\\nI1202 11:51:42.791250 6138 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nF1202 11:51:42.791253 6138 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.075846 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.087454 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.098754 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.132812 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.132849 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.132858 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.132873 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.132881 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:56Z","lastTransitionTime":"2025-12-02T11:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.138658 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovnkube-controller/1.log" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.141187 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerStarted","Data":"0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442"} Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.141309 4721 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.155538 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs
\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.169063 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53
c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585
bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.183222 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.194602 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.205033 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.219938 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.233017 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.234747 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.234803 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.234820 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.234842 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.234856 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:56Z","lastTransitionTime":"2025-12-02T11:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.253845 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:42Z\\\",\\\"message\\\":\\\"40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:42.791167 6138 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1202 11:51:42.791179 6138 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1202 11:51:42.791193 6138 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 11:51:42.791202 6138 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1202 11:51:42.791227 6138 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}\\\\nI1202 11:51:42.791240 6138 services_controller.go:360] Finished syncing service downloads on namespace openshift-console for network=default : 3.193901ms\\\\nI1202 11:51:42.791250 6138 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nF1202 11:51:42.791253 6138 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.269560 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 
11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.281426 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.295680 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.337972 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.338005 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.338014 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.338028 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.338037 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:56Z","lastTransitionTime":"2025-12-02T11:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.345432 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.366048 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9
864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.378788 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f5407555
8ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.389310 4721 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.397766 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.405714 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:56Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.439882 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.439932 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.439941 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.439956 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.439999 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:56Z","lastTransitionTime":"2025-12-02T11:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.541764 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.541819 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.541837 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.541861 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.541877 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:56Z","lastTransitionTime":"2025-12-02T11:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.643968 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.644336 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.644565 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.644730 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.644879 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:56Z","lastTransitionTime":"2025-12-02T11:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.747275 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.747325 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.747335 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.747374 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.747384 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:56Z","lastTransitionTime":"2025-12-02T11:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.848723 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.848756 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.848767 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.848782 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.848792 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:56Z","lastTransitionTime":"2025-12-02T11:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.951537 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.951576 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.951587 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.951603 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:56 crc kubenswrapper[4721]: I1202 11:51:56.951615 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:56Z","lastTransitionTime":"2025-12-02T11:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.053774 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.053811 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.053819 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.053834 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.053843 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:57Z","lastTransitionTime":"2025-12-02T11:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.145400 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovnkube-controller/2.log" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.145978 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovnkube-controller/1.log" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.148236 4721 generic.go:334] "Generic (PLEG): container finished" podID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerID="0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442" exitCode=1 Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.148284 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerDied","Data":"0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442"} Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.148322 4721 scope.go:117] "RemoveContainer" containerID="abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.149036 4721 scope.go:117] "RemoveContainer" containerID="0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442" Dec 02 11:51:57 crc kubenswrapper[4721]: E1202 11:51:57.149224 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.155385 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.155415 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.155423 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.155436 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.155446 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:57Z","lastTransitionTime":"2025-12-02T11:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.162941 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.174675 4721 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f495
9f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.185772 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.194441 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.205153 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.215633 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.225993 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.242549 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://abdebcc3a4e0d6006401bb7593699a39a2f3c3edec2597177bdf3e4b15ae47fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:42Z\\\",\\\"message\\\":\\\"40f5e050c73}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:42.791167 6138 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1202 11:51:42.791179 6138 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1202 11:51:42.791193 6138 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 11:51:42.791202 6138 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1202 11:51:42.791227 6138 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/downloads\\\\\\\"}\\\\nI1202 11:51:42.791240 6138 services_controller.go:360] Finished syncing service downloads on namespace openshift-console for network=default : 3.193901ms\\\\nI1202 11:51:42.791250 6138 services_controller.go:356] Processing sync for service openshift-ingress-operator/metrics for network=default\\\\nF1202 11:51:42.791253 6138 ovnkube.go:137] failed to 
run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:56Z\\\",\\\"message\\\":\\\"363 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver/apiserver]} name:Service_openshift-kube-apiserver/apiserver_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.93:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d71b38eb-32af-4c0f-9490-7c317c111e3a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:56.712367 6363 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd in node crc\\\\nI1202 11:51:56.712373 6363 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd after 0 failed attempt(s)\\\\nI1202 11:51:56.712378 6363 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-gbxxd\\\\nF1202 11:51:56.712316 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.252695 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.257208 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.257235 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.257247 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.257261 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.257270 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:57Z","lastTransitionTime":"2025-12-02T11:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.263054 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.272671 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.283158 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.299186 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ec
d6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.310954 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.322375 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.331709 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.341446 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:57Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.359183 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.359221 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.359229 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.359245 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.359254 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:57Z","lastTransitionTime":"2025-12-02T11:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.461703 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.461747 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.461759 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.461778 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.461790 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:57Z","lastTransitionTime":"2025-12-02T11:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.564361 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.564401 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.564411 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.564427 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.564437 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:57Z","lastTransitionTime":"2025-12-02T11:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.666139 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.666175 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.666186 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.666202 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.666211 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:57Z","lastTransitionTime":"2025-12-02T11:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.768717 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.768759 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.768770 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.768786 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.768797 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:57Z","lastTransitionTime":"2025-12-02T11:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.839643 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.839672 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.839676 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.839651 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:51:57 crc kubenswrapper[4721]: E1202 11:51:57.839745 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:51:57 crc kubenswrapper[4721]: E1202 11:51:57.839929 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:51:57 crc kubenswrapper[4721]: E1202 11:51:57.840013 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:51:57 crc kubenswrapper[4721]: E1202 11:51:57.840081 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.871507 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.871742 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.871805 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.871957 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.872030 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:57Z","lastTransitionTime":"2025-12-02T11:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.974668 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.974707 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.974716 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.974729 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:57 crc kubenswrapper[4721]: I1202 11:51:57.974739 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:57Z","lastTransitionTime":"2025-12-02T11:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.077142 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.077233 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.077250 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.077273 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.077291 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:58Z","lastTransitionTime":"2025-12-02T11:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.152726 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovnkube-controller/2.log"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.179875 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.179926 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.179948 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.179967 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.179980 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:58Z","lastTransitionTime":"2025-12-02T11:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.282163 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.282198 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.282208 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.282226 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.282237 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:58Z","lastTransitionTime":"2025-12-02T11:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.384577 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.384618 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.384629 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.384645 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.384656 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:58Z","lastTransitionTime":"2025-12-02T11:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.487417 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.487474 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.487486 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.487503 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.487517 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:58Z","lastTransitionTime":"2025-12-02T11:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
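The status patches in this section all die on the same TLS step: the network-node-identity webhook's serving certificate has NotAfter 2025-08-24T17:21:41Z while the node clock reads 2025-12-02, so x509 verification fails before any patch is delivered. Here is a minimal sketch of that validity-window comparison using Go's standard crypto/x509; the command-line PEM path is illustrative, not a path from this cluster.

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	if len(os.Args) < 2 {
		fmt.Fprintln(os.Stderr, "usage: certcheck <cert.pem>")
		os.Exit(1)
	}
	data, err := os.ReadFile(os.Args[1])
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	// The same NotBefore/NotAfter window check a TLS handshake applies,
	// reported in the log's own phrasing.
	now := time.Now()
	switch {
	case now.After(cert.NotAfter):
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate is not yet valid: current time %s is before %s\n",
			now.UTC().Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
	default:
		fmt.Println("certificate valid until", cert.NotAfter.UTC().Format(time.RFC3339))
	}
}

Because the webhook rejects every node and pod status write, the kubelet keeps retrying the same patches, which is why the entries below repeat with only the timestamps advancing.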
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.590045 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.590108 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.590119 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.590133 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.590144 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:58Z","lastTransitionTime":"2025-12-02T11:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.692324 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.692379 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.692390 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.692405 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.692415 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:58Z","lastTransitionTime":"2025-12-02T11:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.794410 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.794446 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.794454 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.794467 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.794476 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:58Z","lastTransitionTime":"2025-12-02T11:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.896763 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.896827 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.896850 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.896878 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.896900 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:58Z","lastTransitionTime":"2025-12-02T11:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.999059 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.999098 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.999108 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.999121 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:58 crc kubenswrapper[4721]: I1202 11:51:58.999132 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:58Z","lastTransitionTime":"2025-12-02T11:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.102220 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.102285 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.102301 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.102324 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.102365 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:59Z","lastTransitionTime":"2025-12-02T11:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
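Each "Node became not ready" line prints the condition object the kubelet is about to write into node status. For reference, a small Go sketch that reproduces that JSON shape with plain structs; the real type is NodeCondition in k8s.io/api/core/v1, and this stand-in only mirrors the fields visible in the log.

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-in for the condition printed by setters.go above; field names
// mirror the JSON keys visible in the log, not the upstream Go type.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	cond := nodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  "2025-12-02T11:51:59Z",
		LastTransitionTime: "2025-12-02T11:51:59Z",
		Reason:             "KubeletNotReady",
		Message:            "container runtime network not ready: NetworkReady=false ...",
	}
	out, err := json.Marshal(cond)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}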
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.204437 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.204486 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.204497 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.204510 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.204521 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:59Z","lastTransitionTime":"2025-12-02T11:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.306227 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.306297 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.306315 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.306375 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.306432 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:59Z","lastTransitionTime":"2025-12-02T11:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.409238 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.409287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.409306 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.409329 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.409388 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:59Z","lastTransitionTime":"2025-12-02T11:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.512073 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.512160 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.512181 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.512211 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.512232 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:59Z","lastTransitionTime":"2025-12-02T11:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.615451 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.615532 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.615557 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.615587 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.615611 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:59Z","lastTransitionTime":"2025-12-02T11:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.661571 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.661633 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.661648 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.661667 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.661682 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:59Z","lastTransitionTime":"2025-12-02T11:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 02 11:51:59 crc kubenswrapper[4721]: E1202 11:51:59.676536 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:59Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.680361 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.680408 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.680423 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.680443 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.680458 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:59Z","lastTransitionTime":"2025-12-02T11:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:59 crc kubenswrapper[4721]: E1202 11:51:59.693093 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:59Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.696114 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.696189 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.696211 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.696240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.696261 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:59Z","lastTransitionTime":"2025-12-02T11:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:59 crc kubenswrapper[4721]: E1202 11:51:59.714447 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:59Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.718743 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.718808 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.718822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.718838 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.718896 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:59Z","lastTransitionTime":"2025-12-02T11:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:59 crc kubenswrapper[4721]: E1202 11:51:59.733023 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:59Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.736736 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.736768 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.736776 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.736792 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.736801 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:59Z","lastTransitionTime":"2025-12-02T11:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:59 crc kubenswrapper[4721]: E1202 11:51:59.748664 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:51:59Z is after 2025-08-24T17:21:41Z" Dec 02 11:51:59 crc kubenswrapper[4721]: E1202 11:51:59.748779 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.750058 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.750118 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.750131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.750149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.750161 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:59Z","lastTransitionTime":"2025-12-02T11:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.839201 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.839224 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.839290 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.839310 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:51:59 crc kubenswrapper[4721]: E1202 11:51:59.839519 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:51:59 crc kubenswrapper[4721]: E1202 11:51:59.839709 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:51:59 crc kubenswrapper[4721]: E1202 11:51:59.839844 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:51:59 crc kubenswrapper[4721]: E1202 11:51:59.839898 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.853524 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.853651 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.853791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.854023 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.854329 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:59Z","lastTransitionTime":"2025-12-02T11:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.957594 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.957660 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.957683 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.957712 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:51:59 crc kubenswrapper[4721]: I1202 11:51:59.957734 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:51:59Z","lastTransitionTime":"2025-12-02T11:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.035702 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.036976 4721 scope.go:117] "RemoveContainer" containerID="0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442" Dec 02 11:52:00 crc kubenswrapper[4721]: E1202 11:52:00.037371 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.048279 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.059914 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.059959 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.059972 
4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.059990 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.060003 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:00Z","lastTransitionTime":"2025-12-02T11:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.067465 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.083733 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.105724 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3
cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.125422 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.142475 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.157143 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.161370 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.161410 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.161420 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.161434 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.161445 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:00Z","lastTransitionTime":"2025-12-02T11:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.173248 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.185941 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.197977 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.210854 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.224514 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.241320 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.256531 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.263468 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.263539 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.263563 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.263592 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.263615 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:00Z","lastTransitionTime":"2025-12-02T11:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.272914 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z"
Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.291531 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:56Z\\\",\\\"message\\\":\\\"363 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver/apiserver]} name:Service_openshift-kube-apiserver/apiserver_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.93:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d71b38eb-32af-4c0f-9490-7c317c111e3a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:56.712367 6363 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd in node crc\\\\nI1202 11:51:56.712373 6363 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd after 0 failed attempt(s)\\\\nI1202 11:51:56.712378 6363 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-gbxxd\\\\nF1202 11:51:56.712316 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.304306 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.366407 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.366452 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.366485 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.366504 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.366514 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:00Z","lastTransitionTime":"2025-12-02T11:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.468894 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.468953 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.468973 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.468996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.469012 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:00Z","lastTransitionTime":"2025-12-02T11:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.479829 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs\") pod \"network-metrics-daemon-qf9jp\" (UID: \"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\") " pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:00 crc kubenswrapper[4721]: E1202 11:52:00.480018 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 11:52:00 crc kubenswrapper[4721]: E1202 11:52:00.480078 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs podName:c1b39d16-c46b-40dc-a2a6-600f96eb93f7 nodeName:}" failed. No retries permitted until 2025-12-02 11:52:16.480062627 +0000 UTC m=+66.218612976 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs") pod "network-metrics-daemon-qf9jp" (UID: "c1b39d16-c46b-40dc-a2a6-600f96eb93f7") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.572494 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.572771 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.572851 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.572934 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.572998 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:00Z","lastTransitionTime":"2025-12-02T11:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.677955 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.678052 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.678084 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.678121 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.678159 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:00Z","lastTransitionTime":"2025-12-02T11:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.781586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.781624 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.781635 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.781650 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.781660 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:00Z","lastTransitionTime":"2025-12-02T11:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.861584 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:00 crc 
kubenswrapper[4721]: I1202 11:52:00.877313 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.884880 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.884931 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.884946 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.884965 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.884981 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:00Z","lastTransitionTime":"2025-12-02T11:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.893303 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.915562 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9
864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.933821 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f5407555
8ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.951286 4721 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.965995 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.975452 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.987375 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.987440 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.987452 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.987467 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.987478 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:00Z","lastTransitionTime":"2025-12-02T11:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:00 crc kubenswrapper[4721]: I1202 11:52:00.990833 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:00Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.002627 4721 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f495
9f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:01Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.015762 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:01Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.024887 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:52:01Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.034997 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:01Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.049815 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:01Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.060504 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:01Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.084487 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:56Z\\\",\\\"message\\\":\\\"363 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver/apiserver]} name:Service_openshift-kube-apiserver/apiserver_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.93:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d71b38eb-32af-4c0f-9490-7c317c111e3a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:56.712367 6363 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd in node crc\\\\nI1202 11:51:56.712373 6363 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd after 0 failed attempt(s)\\\\nI1202 11:51:56.712378 6363 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-gbxxd\\\\nF1202 11:51:56.712316 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer 
during admin network policy controller initialization, \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:01Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.089285 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.089358 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.089370 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.089383 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.089392 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:01Z","lastTransitionTime":"2025-12-02T11:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.101051 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:01Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.190999 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.191273 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.191493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.191645 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.191962 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:01Z","lastTransitionTime":"2025-12-02T11:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.294594 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.294634 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.294649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.294668 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.294681 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:01Z","lastTransitionTime":"2025-12-02T11:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.397236 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.397604 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.397760 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.397944 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.398097 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:01Z","lastTransitionTime":"2025-12-02T11:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.500893 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.500941 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.500951 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.500966 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.500980 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:01Z","lastTransitionTime":"2025-12-02T11:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.604049 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.604079 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.604088 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.604100 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.604108 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:01Z","lastTransitionTime":"2025-12-02T11:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.690767 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.690880 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.690907 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.690949 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.690969 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.690990 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.691042 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.691054 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.691064 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.691124 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 
11:52:01.691133 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.691140 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.691039 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:52:33.691007006 +0000 UTC m=+83.429557365 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.691177 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 11:52:33.691164011 +0000 UTC m=+83.429714360 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.691187 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 11:52:33.691181971 +0000 UTC m=+83.429732320 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.691198 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 11:52:33.691193312 +0000 UTC m=+83.429743651 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.691208 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.691262 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 11:52:33.691251583 +0000 UTC m=+83.429801942 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.706386 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.706426 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.706435 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.706450 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.706460 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:01Z","lastTransitionTime":"2025-12-02T11:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.809284 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.809329 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.809368 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.809386 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.809397 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:01Z","lastTransitionTime":"2025-12-02T11:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.838797 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.838838 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.838862 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.839017 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.839009 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.839120 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.839200 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:52:01 crc kubenswrapper[4721]: E1202 11:52:01.839398 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.912523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.912565 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.912576 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.912591 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:01 crc kubenswrapper[4721]: I1202 11:52:01.912604 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:01Z","lastTransitionTime":"2025-12-02T11:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.015410 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.015443 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.015454 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.015473 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.015484 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:02Z","lastTransitionTime":"2025-12-02T11:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.117815 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.117852 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.117863 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.117916 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.117928 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:02Z","lastTransitionTime":"2025-12-02T11:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.220518 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.220588 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.220610 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.220635 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.220652 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:02Z","lastTransitionTime":"2025-12-02T11:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.322632 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.323057 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.323077 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.323105 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.323123 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:02Z","lastTransitionTime":"2025-12-02T11:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.426209 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.426473 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.426550 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.426627 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.426700 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:02Z","lastTransitionTime":"2025-12-02T11:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.528729 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.528770 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.528780 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.528794 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.528804 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:02Z","lastTransitionTime":"2025-12-02T11:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.631215 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.631267 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.631284 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.631310 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.631327 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:02Z","lastTransitionTime":"2025-12-02T11:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.734114 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.734168 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.734185 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.734207 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.734224 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:02Z","lastTransitionTime":"2025-12-02T11:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.837271 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.837325 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.837379 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.837411 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.837429 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:02Z","lastTransitionTime":"2025-12-02T11:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.940011 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.940078 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.940102 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.940129 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:02 crc kubenswrapper[4721]: I1202 11:52:02.940153 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:02Z","lastTransitionTime":"2025-12-02T11:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.042761 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.042808 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.042824 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.042848 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.042868 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:03Z","lastTransitionTime":"2025-12-02T11:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.144039 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.145239 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.145302 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.145326 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.145405 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.145481 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:03Z","lastTransitionTime":"2025-12-02T11:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.157072 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.165051 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.185177 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.206836 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.227598 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\
\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.247104 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.248648 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.248686 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.248698 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.248714 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.248727 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:03Z","lastTransitionTime":"2025-12-02T11:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.259430 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.279259 4721 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.302860 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9
864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.315326 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.327055 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.339572 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.351194 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.351254 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.351265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.351280 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.351289 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:03Z","lastTransitionTime":"2025-12-02T11:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.357643 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.372456 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\
\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.388071 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"
}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/oc
p-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip
\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.405010 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha
256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.435537 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e
56227d986a99dfb4d81d3442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:56Z\\\",\\\"message\\\":\\\"363 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver/apiserver]} name:Service_openshift-kube-apiserver/apiserver_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.93:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d71b38eb-32af-4c0f-9490-7c317c111e3a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:56.712367 6363 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd in node crc\\\\nI1202 11:51:56.712373 6363 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd after 0 failed attempt(s)\\\\nI1202 11:51:56.712378 6363 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-gbxxd\\\\nF1202 11:51:56.712316 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.453502 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:03Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.454462 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.454496 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.454504 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.454517 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.454527 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:03Z","lastTransitionTime":"2025-12-02T11:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.557185 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.557236 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.557258 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.557294 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.557307 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:03Z","lastTransitionTime":"2025-12-02T11:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.660082 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.660132 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.660144 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.660165 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.660183 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:03Z","lastTransitionTime":"2025-12-02T11:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.762390 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.762464 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.762486 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.762513 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.762537 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:03Z","lastTransitionTime":"2025-12-02T11:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.838873 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.838895 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.839010 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:03 crc kubenswrapper[4721]: E1202 11:52:03.839125 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:03 crc kubenswrapper[4721]: I1202 11:52:03.839147 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:03 crc kubenswrapper[4721]: E1202 11:52:03.839236 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:52:03 crc kubenswrapper[4721]: E1202 11:52:03.839375 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:52:03 crc kubenswrapper[4721]: E1202 11:52:03.839530 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:52:05 crc kubenswrapper[4721]: I1202 11:52:05.839281 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:05 crc kubenswrapper[4721]: I1202 11:52:05.839337 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:05 crc kubenswrapper[4721]: E1202 11:52:05.839415 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:05 crc kubenswrapper[4721]: E1202 11:52:05.839497 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:52:05 crc kubenswrapper[4721]: I1202 11:52:05.839566 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:05 crc kubenswrapper[4721]: E1202 11:52:05.839622 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:52:05 crc kubenswrapper[4721]: I1202 11:52:05.839670 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:05 crc kubenswrapper[4721]: E1202 11:52:05.839717 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:52:07 crc kubenswrapper[4721]: I1202 11:52:07.838747 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:07 crc kubenswrapper[4721]: I1202 11:52:07.838785 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:07 crc kubenswrapper[4721]: I1202 11:52:07.838809 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:07 crc kubenswrapper[4721]: E1202 11:52:07.838890 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:07 crc kubenswrapper[4721]: I1202 11:52:07.838952 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:07 crc kubenswrapper[4721]: E1202 11:52:07.838998 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:52:07 crc kubenswrapper[4721]: E1202 11:52:07.839079 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:52:07 crc kubenswrapper[4721]: E1202 11:52:07.839271 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.003262 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.003318 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.003328 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.003357 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.003367 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:08Z","lastTransitionTime":"2025-12-02T11:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.105842 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.105889 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.105901 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.105919 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.105932 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:08Z","lastTransitionTime":"2025-12-02T11:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.208435 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.208485 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.208500 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.208518 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.208531 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:08Z","lastTransitionTime":"2025-12-02T11:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.311068 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.311167 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.311177 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.311189 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.311198 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:08Z","lastTransitionTime":"2025-12-02T11:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.413487 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.413546 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.413564 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.413588 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.413605 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:08Z","lastTransitionTime":"2025-12-02T11:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.515788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.515848 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.515865 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.515889 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.515905 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:08Z","lastTransitionTime":"2025-12-02T11:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.618522 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.618557 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.618565 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.618577 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.618586 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:08Z","lastTransitionTime":"2025-12-02T11:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.721451 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.721502 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.721516 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.721535 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.721547 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:08Z","lastTransitionTime":"2025-12-02T11:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.824263 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.824307 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.824321 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.824361 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.824377 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:08Z","lastTransitionTime":"2025-12-02T11:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.926749 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.926810 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.926832 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.926860 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:08 crc kubenswrapper[4721]: I1202 11:52:08.926885 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:08Z","lastTransitionTime":"2025-12-02T11:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.030110 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.030150 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.030161 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.030177 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.030189 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:09Z","lastTransitionTime":"2025-12-02T11:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.133019 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.133072 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.133084 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.133101 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.133115 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:09Z","lastTransitionTime":"2025-12-02T11:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
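The condition payload in each "Node became not ready" entry above is plain JSON whose fields mirror a core/v1 NodeCondition. A short Go sketch that decodes one payload copied from the log; the struct is local to the sketch and not taken from kubelet source:

// condition.go - decode a "Node became not ready" condition payload.
package main

import (
	"encoding/json"
	"fmt"
)

type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Payload copied verbatim from one setters.go:603 entry above.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:08Z","lastTransitionTime":"2025-12-02T11:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	fmt.Printf("%s=%s (%s): %s\n", c.Type, c.Status, c.Reason, c.Message)
}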
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.236056 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.236094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.236103 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.236116 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.236125 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:09Z","lastTransitionTime":"2025-12-02T11:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.339190 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.339231 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.339241 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.339255 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.339266 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:09Z","lastTransitionTime":"2025-12-02T11:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.442393 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.442460 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.442483 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.442510 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.442531 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:09Z","lastTransitionTime":"2025-12-02T11:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.544804 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.544879 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.544903 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.544963 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.544990 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:09Z","lastTransitionTime":"2025-12-02T11:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.649695 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.649745 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.649761 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.649783 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.649800 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:09Z","lastTransitionTime":"2025-12-02T11:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.752107 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.752166 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.752182 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.752206 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.752224 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:09Z","lastTransitionTime":"2025-12-02T11:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.839565 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.839661 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.839675 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:09 crc kubenswrapper[4721]: E1202 11:52:09.839783 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.839800 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:09 crc kubenswrapper[4721]: E1202 11:52:09.839951 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:52:09 crc kubenswrapper[4721]: E1202 11:52:09.840019 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:09 crc kubenswrapper[4721]: E1202 11:52:09.840165 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.854121 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.854181 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.854195 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.854234 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.854246 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:09Z","lastTransitionTime":"2025-12-02T11:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.868843 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.868900 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.868916 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.868951 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.868975 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:09Z","lastTransitionTime":"2025-12-02T11:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:09 crc kubenswrapper[4721]: E1202 11:52:09.888977 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:09Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.893902 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.893943 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.893956 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.893976 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.893989 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:09Z","lastTransitionTime":"2025-12-02T11:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:09 crc kubenswrapper[4721]: E1202 11:52:09.908270 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:09Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.913334 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.913800 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.913810 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.913826 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.913838 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:09Z","lastTransitionTime":"2025-12-02T11:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:09 crc kubenswrapper[4721]: E1202 11:52:09.930930 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:09Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.936099 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.936158 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.936175 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.936298 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.936330 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:09Z","lastTransitionTime":"2025-12-02T11:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:09 crc kubenswrapper[4721]: E1202 11:52:09.952240 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ...image list elided; byte-identical to the list in the preceding retry attempt above... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:09Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.958810 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.958954 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.958994 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.959019 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.959037 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:09Z","lastTransitionTime":"2025-12-02T11:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:09 crc kubenswrapper[4721]: E1202 11:52:09.973441 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ...image list elided; byte-identical to the lists in the two preceding retry attempts above... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:09Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:09 crc kubenswrapper[4721]: E1202 11:52:09.973556 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.975482 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.975531 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.975556 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.975570 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:09 crc kubenswrapper[4721]: I1202 11:52:09.975580 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:09Z","lastTransitionTime":"2025-12-02T11:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.077926 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.077965 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.077975 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.077989 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.078000 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:10Z","lastTransitionTime":"2025-12-02T11:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.180653 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.180689 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.180700 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.180716 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.180729 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:10Z","lastTransitionTime":"2025-12-02T11:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.283048 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.283122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.283144 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.283170 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.283188 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:10Z","lastTransitionTime":"2025-12-02T11:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.385384 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.385429 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.385439 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.385455 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.385466 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:10Z","lastTransitionTime":"2025-12-02T11:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.488378 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.488443 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.488462 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.488486 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.488503 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:10Z","lastTransitionTime":"2025-12-02T11:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.590428 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.590462 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.590473 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.590488 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.590498 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:10Z","lastTransitionTime":"2025-12-02T11:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.692714 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.692757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.692767 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.692781 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.692821 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:10Z","lastTransitionTime":"2025-12-02T11:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.794640 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.794675 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.794684 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.794697 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.794705 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:10Z","lastTransitionTime":"2025-12-02T11:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.859600 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:10Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.877631 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:10Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.887544 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:10Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.896768 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:10Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.903668 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.903701 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.903709 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.903723 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.903732 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:10Z","lastTransitionTime":"2025-12-02T11:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.913281 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9be7d0c0-4bed-46b7-933f-d20879e184c5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1831ed7c5b757a786e658ca6a57ffa86815b41736b0d2aa32276d8311a9e8e60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0e89a391ffe89140ff679974abf51939d2852bf0c8b707e199be40fc5fc1f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fb6fb086b81deae49d00a8a09742c1fec519578ec195133c817d72e91b5fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:10Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.941123 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019
e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:10Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.956358 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:10Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.967789 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:10Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.980357 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:10Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:10 crc kubenswrapper[4721]: I1202 11:52:10.993133 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:10Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.003454 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:11Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.006193 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.006221 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.006229 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.006242 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.006253 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:11Z","lastTransitionTime":"2025-12-02T11:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.015935 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:11Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.026453 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:11Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.037377 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:11Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.053553 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e
56227d986a99dfb4d81d3442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:56Z\\\",\\\"message\\\":\\\"363 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver/apiserver]} name:Service_openshift-kube-apiserver/apiserver_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.93:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d71b38eb-32af-4c0f-9490-7c317c111e3a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:56.712367 6363 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd in node crc\\\\nI1202 11:51:56.712373 6363 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd after 0 failed attempt(s)\\\\nI1202 11:51:56.712378 6363 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-gbxxd\\\\nF1202 11:51:56.712316 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:11Z is after 2025-08-24T17:21:41Z"
Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.065917 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:11Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.077230 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:11Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.087724 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:11Z is after 2025-08-24T17:21:41Z"
Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.109167 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.109192 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.109201 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.109214 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.109224 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:11Z","lastTransitionTime":"2025-12-02T11:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.838823 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.838867 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:11 crc kubenswrapper[4721]: E1202 11:52:11.838915 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.839024 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:11 crc kubenswrapper[4721]: I1202 11:52:11.839028 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:11 crc kubenswrapper[4721]: E1202 11:52:11.839190 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:11 crc kubenswrapper[4721]: E1202 11:52:11.839368 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:52:11 crc kubenswrapper[4721]: E1202 11:52:11.839439 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:52:13 crc kubenswrapper[4721]: I1202 11:52:13.839137 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:13 crc kubenswrapper[4721]: I1202 11:52:13.839235 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:13 crc kubenswrapper[4721]: I1202 11:52:13.839162 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:13 crc kubenswrapper[4721]: I1202 11:52:13.839134 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:13 crc kubenswrapper[4721]: E1202 11:52:13.839295 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:52:13 crc kubenswrapper[4721]: E1202 11:52:13.839436 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:13 crc kubenswrapper[4721]: E1202 11:52:13.839872 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:52:13 crc kubenswrapper[4721]: E1202 11:52:13.839997 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:52:13 crc kubenswrapper[4721]: I1202 11:52:13.840161 4721 scope.go:117] "RemoveContainer" containerID="0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442"
Dec 02 11:52:13 crc kubenswrapper[4721]: E1202 11:52:13.840463 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b"
Dec 02 11:52:14 crc kubenswrapper[4721]: I1202 11:52:14.805139 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:14 crc kubenswrapper[4721]: I1202 11:52:14.805180 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:14 crc kubenswrapper[4721]: I1202 11:52:14.805190 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:14 crc kubenswrapper[4721]: I1202 11:52:14.805205 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:14 crc kubenswrapper[4721]: I1202 11:52:14.805215 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:14Z","lastTransitionTime":"2025-12-02T11:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:14 crc kubenswrapper[4721]: I1202 11:52:14.907979 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:14 crc kubenswrapper[4721]: I1202 11:52:14.908031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:14 crc kubenswrapper[4721]: I1202 11:52:14.908044 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:14 crc kubenswrapper[4721]: I1202 11:52:14.908061 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:14 crc kubenswrapper[4721]: I1202 11:52:14.908075 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:14Z","lastTransitionTime":"2025-12-02T11:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.010285 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.010363 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.010376 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.010392 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.010403 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:15Z","lastTransitionTime":"2025-12-02T11:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.112356 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.112400 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.112415 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.112433 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.112446 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:15Z","lastTransitionTime":"2025-12-02T11:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.213833 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.213905 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.213915 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.213928 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.213938 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:15Z","lastTransitionTime":"2025-12-02T11:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.316292 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.316357 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.316366 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.316382 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.316392 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:15Z","lastTransitionTime":"2025-12-02T11:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.419134 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.419173 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.419181 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.419196 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.419206 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:15Z","lastTransitionTime":"2025-12-02T11:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.520851 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.520897 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.520909 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.520924 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.520933 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:15Z","lastTransitionTime":"2025-12-02T11:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.623714 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.623746 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.623755 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.623770 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.623780 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:15Z","lastTransitionTime":"2025-12-02T11:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.727094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.727169 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.727180 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.727196 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.727207 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:15Z","lastTransitionTime":"2025-12-02T11:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.829437 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.829470 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.829478 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.829489 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.829497 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:15Z","lastTransitionTime":"2025-12-02T11:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.839004 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.839035 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:52:15 crc kubenswrapper[4721]: E1202 11:52:15.839096 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.839107 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:15 crc kubenswrapper[4721]: E1202 11:52:15.839192 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.839241 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:52:15 crc kubenswrapper[4721]: E1202 11:52:15.839286 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:52:15 crc kubenswrapper[4721]: E1202 11:52:15.839331 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.931848 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.931903 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.931920 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.931942 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:15 crc kubenswrapper[4721]: I1202 11:52:15.932007 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:15Z","lastTransitionTime":"2025-12-02T11:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.034318 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.034386 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.034398 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.034413 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.034426 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:16Z","lastTransitionTime":"2025-12-02T11:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.136034 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.136069 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.136080 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.136097 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.136108 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:16Z","lastTransitionTime":"2025-12-02T11:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.237796 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.237827 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.237835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.237849 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.237858 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:16Z","lastTransitionTime":"2025-12-02T11:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.340400 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.340433 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.340442 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.340454 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.340463 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:16Z","lastTransitionTime":"2025-12-02T11:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.442208 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.442241 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.442248 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.442260 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.442268 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:16Z","lastTransitionTime":"2025-12-02T11:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.538582 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs\") pod \"network-metrics-daemon-qf9jp\" (UID: \"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\") " pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:16 crc kubenswrapper[4721]: E1202 11:52:16.538800 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 11:52:16 crc kubenswrapper[4721]: E1202 11:52:16.538870 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs podName:c1b39d16-c46b-40dc-a2a6-600f96eb93f7 nodeName:}" failed. No retries permitted until 2025-12-02 11:52:48.538852882 +0000 UTC m=+98.277403251 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs") pod "network-metrics-daemon-qf9jp" (UID: "c1b39d16-c46b-40dc-a2a6-600f96eb93f7") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.544114 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.544147 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.544173 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.544189 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.544199 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:16Z","lastTransitionTime":"2025-12-02T11:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.645850 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.645883 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.645890 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.645902 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.645933 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:16Z","lastTransitionTime":"2025-12-02T11:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.748220 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.748260 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.748271 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.748287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.748298 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:16Z","lastTransitionTime":"2025-12-02T11:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.850304 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.850356 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.850367 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.850379 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.850388 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:16Z","lastTransitionTime":"2025-12-02T11:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.952280 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.952322 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.952330 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.952358 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:16 crc kubenswrapper[4721]: I1202 11:52:16.952368 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:16Z","lastTransitionTime":"2025-12-02T11:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.054416 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.054454 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.054464 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.054477 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.054489 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:17Z","lastTransitionTime":"2025-12-02T11:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.156303 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.156357 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.156365 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.156377 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.156386 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:17Z","lastTransitionTime":"2025-12-02T11:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.258820 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.258864 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.258880 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.258901 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.258918 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:17Z","lastTransitionTime":"2025-12-02T11:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.361756 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.361806 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.361822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.361842 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.361859 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:17Z","lastTransitionTime":"2025-12-02T11:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.464302 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.464400 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.464418 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.464903 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.464972 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:17Z","lastTransitionTime":"2025-12-02T11:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.567958 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.568008 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.568017 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.568030 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.568042 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:17Z","lastTransitionTime":"2025-12-02T11:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.670165 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.670204 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.670212 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.670226 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.670235 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:17Z","lastTransitionTime":"2025-12-02T11:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.772552 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.772604 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.772612 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.772632 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.772641 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:17Z","lastTransitionTime":"2025-12-02T11:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.839532 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.839629 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.839682 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:17 crc kubenswrapper[4721]: E1202 11:52:17.839835 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.839881 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:17 crc kubenswrapper[4721]: E1202 11:52:17.840070 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:52:17 crc kubenswrapper[4721]: E1202 11:52:17.840115 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:52:17 crc kubenswrapper[4721]: E1202 11:52:17.840240 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.875177 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.875203 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.875212 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.875224 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.875234 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:17Z","lastTransitionTime":"2025-12-02T11:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.977903 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.977960 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.977976 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.977999 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:17 crc kubenswrapper[4721]: I1202 11:52:17.978017 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:17Z","lastTransitionTime":"2025-12-02T11:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.080501 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.080816 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.080825 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.080839 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.080848 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:18Z","lastTransitionTime":"2025-12-02T11:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.182667 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.182728 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.182748 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.182777 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.182795 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:18Z","lastTransitionTime":"2025-12-02T11:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.213898 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzz4m_3becf918-fa56-4f68-9963-b19fb91d4343/kube-multus/0.log" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.213945 4721 generic.go:334] "Generic (PLEG): container finished" podID="3becf918-fa56-4f68-9963-b19fb91d4343" containerID="ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e" exitCode=1 Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.213973 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzz4m" event={"ID":"3becf918-fa56-4f68-9963-b19fb91d4343","Type":"ContainerDied","Data":"ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e"} Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.214323 4721 scope.go:117] "RemoveContainer" containerID="ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.232071 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.244906 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.256632 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.266927 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.277883 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:18Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:52:18Z\\\",\\\"message\\\":\\\"2025-12-02T11:51:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288\\\\n2025-12-02T11:51:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288 to /host/opt/cni/bin/\\\\n2025-12-02T11:51:32Z [verbose] multus-daemon started\\\\n2025-12-02T11:51:32Z [verbose] Readiness Indicator file check\\\\n2025-12-02T11:52:17Z [error] have you checked that your 
default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.284598 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.284647 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.284656 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.284670 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.284679 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:18Z","lastTransitionTime":"2025-12-02T11:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.289142 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.310555 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/sec
rets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"ex
itCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.330915 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20994
82919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:56Z\\\",\\\"message\\\":\\\"363 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver/apiserver]} name:Service_openshift-kube-apiserver/apiserver_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.93:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d71b38eb-32af-4c0f-9490-7c317c111e3a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:56.712367 6363 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd in node crc\\\\nI1202 11:51:56.712373 6363 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd after 0 failed attempt(s)\\\\nI1202 11:51:56.712378 6363 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-gbxxd\\\\nF1202 11:51:56.712316 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.340220 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 
11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.350619 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.361870 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.372838 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.382513 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9be7d0c0-4bed-46b7-933f-d20879e184c5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1831ed7c5b757a786e658ca6a57ffa86815b41736b0d2aa32276d8311a9e8e60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0e89a391ffe89140ff679974abf51939d2852bf0c8b707e199be40fc5fc1f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fb6fb086b81deae49d00a8a09742c1fec519578ec195133c817d72e91b5fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controll
er\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.386075 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.386109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.386119 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.386133 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.386143 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:18Z","lastTransitionTime":"2025-12-02T11:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.400269 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.414521 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.425044 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.433620 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.443652 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:18Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.488717 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.488756 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.488765 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.488781 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.488791 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:18Z","lastTransitionTime":"2025-12-02T11:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.591040 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.591080 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.591091 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.591108 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.591121 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:18Z","lastTransitionTime":"2025-12-02T11:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.693423 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.693455 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.693465 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.693477 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.693486 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:18Z","lastTransitionTime":"2025-12-02T11:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.795909 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.795941 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.795951 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.795964 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.795973 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:18Z","lastTransitionTime":"2025-12-02T11:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.898486 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.898556 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.898567 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.898583 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:18 crc kubenswrapper[4721]: I1202 11:52:18.898592 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:18Z","lastTransitionTime":"2025-12-02T11:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.001773 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.001809 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.001817 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.001830 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.001841 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:19Z","lastTransitionTime":"2025-12-02T11:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.104748 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.104799 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.104810 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.104825 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.104836 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:19Z","lastTransitionTime":"2025-12-02T11:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.207717 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.207746 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.207754 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.207769 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.207777 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:19Z","lastTransitionTime":"2025-12-02T11:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.218825 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzz4m_3becf918-fa56-4f68-9963-b19fb91d4343/kube-multus/0.log" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.218910 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzz4m" event={"ID":"3becf918-fa56-4f68-9963-b19fb91d4343","Type":"ContainerStarted","Data":"4f55051ff5eedef4b085c57ab10b6545bc8d2abdc70c6e15444e4e0adcd5f659"} Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.231455 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.241743 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.254936 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.264701 4721 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9be7d0c0-4bed-46b7-933f-d20879e184c5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1831ed7c5b757a786e658ca6a57ffa86815b41736b0d2aa32276d8311a9e8e60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0e89a391ffe89140ff679974abf51939d2852bf0c8b707e199be40fc5fc1f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fb6fb086b81deae49d00a8a09742c1fec519578ec195133c817d72e91b5fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\
\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.281641 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb6
8e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.293563 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.304548 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.309964 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.310012 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.310022 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.310039 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.310052 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:19Z","lastTransitionTime":"2025-12-02T11:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.313464 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.323692 4721 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.335971 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.351085 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.359429 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.373873 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.386448 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f55051ff5eedef4b085c57ab10b6545bc8d2abdc70c6e15444e4e0adcd5f659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:52:18Z\\\",\\\"message\\\":\\\"2025-12-02T11:51:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288\\\\n2025-12-02T11:51:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288 to /host/opt/cni/bin/\\\\n2025-12-02T11:51:32Z [verbose] multus-daemon started\\\\n2025-12-02T11:51:32Z [verbose] Readiness Indicator file check\\\\n2025-12-02T11:52:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:52:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.397258 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.408867 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.412670 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.412707 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.412716 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.412731 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.412740 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:19Z","lastTransitionTime":"2025-12-02T11:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.429148 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e
56227d986a99dfb4d81d3442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:56Z\\\",\\\"message\\\":\\\"363 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver/apiserver]} name:Service_openshift-kube-apiserver/apiserver_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.93:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d71b38eb-32af-4c0f-9490-7c317c111e3a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:56.712367 6363 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd in node crc\\\\nI1202 11:51:56.712373 6363 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd after 0 failed attempt(s)\\\\nI1202 11:51:56.712378 6363 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-gbxxd\\\\nF1202 11:51:56.712316 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.439664 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:19Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.514766 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.514796 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.514805 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.514817 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.514828 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:19Z","lastTransitionTime":"2025-12-02T11:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.617860 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.617898 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.617911 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.617926 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.617938 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:19Z","lastTransitionTime":"2025-12-02T11:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.720648 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.720875 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.720938 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.720996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.721073 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:19Z","lastTransitionTime":"2025-12-02T11:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.823280 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.823315 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.823325 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.823361 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.823373 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:19Z","lastTransitionTime":"2025-12-02T11:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.839628 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.839648 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:19 crc kubenswrapper[4721]: E1202 11:52:19.839738 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.839878 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.839908 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:19 crc kubenswrapper[4721]: E1202 11:52:19.840096 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:52:19 crc kubenswrapper[4721]: E1202 11:52:19.840249 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:52:19 crc kubenswrapper[4721]: E1202 11:52:19.840307 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.926102 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.926146 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.926158 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.926175 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:19 crc kubenswrapper[4721]: I1202 11:52:19.926186 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:19Z","lastTransitionTime":"2025-12-02T11:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.027907 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.028107 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.028188 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.028258 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.028315 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:20Z","lastTransitionTime":"2025-12-02T11:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.129976 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.130231 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.130321 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.130421 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.130497 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:20Z","lastTransitionTime":"2025-12-02T11:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.170562 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.170702 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.170780 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.170870 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.170951 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:20Z","lastTransitionTime":"2025-12-02T11:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: E1202 11:52:20.185679 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:20Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.189441 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.189503 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.189520 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.189544 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.189562 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:20Z","lastTransitionTime":"2025-12-02T11:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: E1202 11:52:20.203351 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:20Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.206889 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.206944 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.206960 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.206982 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.206999 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:20Z","lastTransitionTime":"2025-12-02T11:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: E1202 11:52:20.219687 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:20Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.222542 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.222577 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.222585 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.222599 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.222608 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:20Z","lastTransitionTime":"2025-12-02T11:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: E1202 11:52:20.233120 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:20Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.237189 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.237238 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.237253 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.237270 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.237283 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:20Z","lastTransitionTime":"2025-12-02T11:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: E1202 11:52:20.248431 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:20Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:20 crc kubenswrapper[4721]: E1202 11:52:20.248580 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.249931 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.249968 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.249981 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.249996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.250008 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:20Z","lastTransitionTime":"2025-12-02T11:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.352995 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.353043 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.353057 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.353073 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.353086 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:20Z","lastTransitionTime":"2025-12-02T11:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.456190 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.456257 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.456281 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.456310 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.456331 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:20Z","lastTransitionTime":"2025-12-02T11:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.558140 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.558192 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.558204 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.558222 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.558238 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:20Z","lastTransitionTime":"2025-12-02T11:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.661438 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.661469 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.661478 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.661495 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.661506 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:20Z","lastTransitionTime":"2025-12-02T11:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.764057 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.764098 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.764110 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.764126 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.764137 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:20Z","lastTransitionTime":"2025-12-02T11:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.855113 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9be7d0c0-4bed-46b7-933f-d20879e184c5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1831ed7c5b757a786e658ca6a57ffa86815b41736b0d2aa32276d8311a9e8e60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0e89a391ffe89140ff679974abf51939d2852bf0c8b707e199be40fc5fc1f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fb6fb086b81deae49d00a8a09742c1fec519578ec195133c817d72e91b5fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:20Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.868689 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.868757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.868778 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.868804 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.868825 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:20Z","lastTransitionTime":"2025-12-02T11:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.877604 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:20Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.892616 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:20Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.905752 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:20Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.916630 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:20Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.931123 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:20Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.950491 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:20Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.964626 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:20Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.971664 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.971723 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.971743 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.971770 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.971794 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:20Z","lastTransitionTime":"2025-12-02T11:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.979502 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:20Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:20 crc kubenswrapper[4721]: I1202 11:52:20.990754 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:20Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.002392 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:21Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.013545 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f55051ff5eedef4b085c57ab10b6545bc8d2abdc70c6e15444e4e0adcd5f659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:52:18Z\\\",\\\"message\\\":\\\"2025-12-02T11:51:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288\\\\n2025-12-02T11:51:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288 to 
/host/opt/cni/bin/\\\\n2025-12-02T11:51:32Z [verbose] multus-daemon started\\\\n2025-12-02T11:51:32Z [verbose] Readiness Indicator file check\\\\n2025-12-02T11:52:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:52:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:21Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.024189 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:21Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.042600 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:56Z\\\",\\\"message\\\":\\\"363 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver/apiserver]} name:Service_openshift-kube-apiserver/apiserver_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.93:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d71b38eb-32af-4c0f-9490-7c317c111e3a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:56.712367 6363 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd in node crc\\\\nI1202 11:51:56.712373 6363 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd after 0 failed attempt(s)\\\\nI1202 11:51:56.712378 6363 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-gbxxd\\\\nF1202 11:51:56.712316 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:21Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.054532 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:21Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.069258 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:21Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.074512 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.074579 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.074596 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.074623 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.074643 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:21Z","lastTransitionTime":"2025-12-02T11:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.087255 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:21Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.107735 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:21Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.177043 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.177093 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.177108 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.177207 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.177225 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:21Z","lastTransitionTime":"2025-12-02T11:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.279466 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.279513 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.279522 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.279541 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.279551 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:21Z","lastTransitionTime":"2025-12-02T11:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.383239 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.383283 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.383294 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.383309 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.383319 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:21Z","lastTransitionTime":"2025-12-02T11:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.485950 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.485993 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.486004 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.486018 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.486030 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:21Z","lastTransitionTime":"2025-12-02T11:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.588464 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.588535 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.588558 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.588587 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.588608 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:21Z","lastTransitionTime":"2025-12-02T11:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.691076 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.691113 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.691123 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.691140 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.691150 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:21Z","lastTransitionTime":"2025-12-02T11:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.792962 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.792995 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.793003 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.793015 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.793024 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:21Z","lastTransitionTime":"2025-12-02T11:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.839666 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.839666 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.839671 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:21 crc kubenswrapper[4721]: E1202 11:52:21.839910 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:52:21 crc kubenswrapper[4721]: E1202 11:52:21.839789 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.839679 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:21 crc kubenswrapper[4721]: E1202 11:52:21.839993 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:52:21 crc kubenswrapper[4721]: E1202 11:52:21.840080 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.895508 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.895581 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.895597 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.895619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.895636 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:21Z","lastTransitionTime":"2025-12-02T11:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.997993 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.998025 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.998033 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.998045 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:21 crc kubenswrapper[4721]: I1202 11:52:21.998053 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:21Z","lastTransitionTime":"2025-12-02T11:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.099724 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.099780 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.099792 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.099825 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.099837 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:22Z","lastTransitionTime":"2025-12-02T11:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.202322 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.202387 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.202399 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.202416 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.202428 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:22Z","lastTransitionTime":"2025-12-02T11:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.304505 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.304542 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.304552 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.304582 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.304593 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:22Z","lastTransitionTime":"2025-12-02T11:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.406678 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.406712 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.406721 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.406733 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.406741 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:22Z","lastTransitionTime":"2025-12-02T11:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.509533 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.509624 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.509695 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.509726 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.510699 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:22Z","lastTransitionTime":"2025-12-02T11:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.613868 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.613907 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.613915 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.613927 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.613938 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:22Z","lastTransitionTime":"2025-12-02T11:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.716800 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.716851 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.716861 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.716875 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.716885 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:22Z","lastTransitionTime":"2025-12-02T11:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.819275 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.819318 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.819329 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.819370 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.819408 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:22Z","lastTransitionTime":"2025-12-02T11:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.929094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.929144 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.929156 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.929172 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:22 crc kubenswrapper[4721]: I1202 11:52:22.929183 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:22Z","lastTransitionTime":"2025-12-02T11:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.032586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.032629 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.032638 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.032652 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.032661 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:23Z","lastTransitionTime":"2025-12-02T11:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.135404 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.135457 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.135473 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.135497 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.135515 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:23Z","lastTransitionTime":"2025-12-02T11:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.238385 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.238419 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.238427 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.238439 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.238449 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:23Z","lastTransitionTime":"2025-12-02T11:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.341170 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.341254 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.341263 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.341276 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.341285 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:23Z","lastTransitionTime":"2025-12-02T11:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.443787 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.443828 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.443840 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.443855 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.443867 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:23Z","lastTransitionTime":"2025-12-02T11:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.546089 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.546149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.546187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.546210 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.546225 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:23Z","lastTransitionTime":"2025-12-02T11:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.648236 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.648268 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.648277 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.648288 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.648298 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:23Z","lastTransitionTime":"2025-12-02T11:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.749691 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.749726 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.749735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.749748 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.749756 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:23Z","lastTransitionTime":"2025-12-02T11:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.839018 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.839187 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:52:23 crc kubenswrapper[4721]: E1202 11:52:23.839372 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.839418 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:23 crc kubenswrapper[4721]: E1202 11:52:23.839456 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.839451 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:23 crc kubenswrapper[4721]: E1202 11:52:23.839597 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:52:23 crc kubenswrapper[4721]: E1202 11:52:23.839790 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.852206 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.852236 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.852284 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.852300 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.852310 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:23Z","lastTransitionTime":"2025-12-02T11:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.955132 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.955169 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.955180 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.955194 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:23 crc kubenswrapper[4721]: I1202 11:52:23.955204 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:23Z","lastTransitionTime":"2025-12-02T11:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.057389 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.057420 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.057428 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.057440 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.057448 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:24Z","lastTransitionTime":"2025-12-02T11:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.159172 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.159205 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.159213 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.159225 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.159235 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:24Z","lastTransitionTime":"2025-12-02T11:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.261162 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.261225 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.261239 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.261263 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.261277 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:24Z","lastTransitionTime":"2025-12-02T11:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.364126 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.364174 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.364183 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.364200 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.364212 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:24Z","lastTransitionTime":"2025-12-02T11:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.466598 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.466636 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.466650 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.466666 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.466681 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:24Z","lastTransitionTime":"2025-12-02T11:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.568597 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.568635 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.568644 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.568655 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.568664 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:24Z","lastTransitionTime":"2025-12-02T11:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.670784 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.670814 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.670825 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.670840 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.670850 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:24Z","lastTransitionTime":"2025-12-02T11:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.773164 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.773203 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.773212 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.773226 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.773238 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:24Z","lastTransitionTime":"2025-12-02T11:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.875132 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.875169 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.875180 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.875195 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.875205 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:24Z","lastTransitionTime":"2025-12-02T11:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.977122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.977160 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.977173 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.977188 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:24 crc kubenswrapper[4721]: I1202 11:52:24.977198 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:24Z","lastTransitionTime":"2025-12-02T11:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.079020 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.079065 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.079077 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.079091 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.079103 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:25Z","lastTransitionTime":"2025-12-02T11:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.181935 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.181972 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.181981 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.181996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.182005 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:25Z","lastTransitionTime":"2025-12-02T11:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.283976 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.284010 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.284019 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.284033 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.284043 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:25Z","lastTransitionTime":"2025-12-02T11:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.387665 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.387732 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.387751 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.387775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.387798 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:25Z","lastTransitionTime":"2025-12-02T11:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.490210 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.490240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.490248 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.490259 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.490267 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:25Z","lastTransitionTime":"2025-12-02T11:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.592923 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.593167 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.593258 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.593367 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.593459 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:25Z","lastTransitionTime":"2025-12-02T11:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.695701 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.695732 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.695740 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.695753 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.695762 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:25Z","lastTransitionTime":"2025-12-02T11:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.798040 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.798131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.798154 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.798653 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.798914 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:25Z","lastTransitionTime":"2025-12-02T11:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.838989 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.839025 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.838994 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:25 crc kubenswrapper[4721]: E1202 11:52:25.839113 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.838984 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:25 crc kubenswrapper[4721]: E1202 11:52:25.839215 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:52:25 crc kubenswrapper[4721]: E1202 11:52:25.839288 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:52:25 crc kubenswrapper[4721]: E1202 11:52:25.839335 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.901936 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.902190 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.902283 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.902417 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:25 crc kubenswrapper[4721]: I1202 11:52:25.902573 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:25Z","lastTransitionTime":"2025-12-02T11:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.005088 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.005167 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.005191 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.005223 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.005250 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:26Z","lastTransitionTime":"2025-12-02T11:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.108389 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.108449 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.108462 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.108479 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.108493 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:26Z","lastTransitionTime":"2025-12-02T11:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.211476 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.211540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.211556 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.211575 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.211589 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:26Z","lastTransitionTime":"2025-12-02T11:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.313813 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.313888 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.313904 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.313922 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.313959 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:26Z","lastTransitionTime":"2025-12-02T11:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.416898 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.416970 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.416996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.417022 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.417041 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:26Z","lastTransitionTime":"2025-12-02T11:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.519991 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.520057 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.520066 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.520101 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.520112 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:26Z","lastTransitionTime":"2025-12-02T11:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.622580 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.622625 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.622638 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.622654 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.622667 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:26Z","lastTransitionTime":"2025-12-02T11:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.725063 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.725140 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.725167 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.725196 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.725215 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:26Z","lastTransitionTime":"2025-12-02T11:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.827512 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.827569 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.827619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.827643 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.827662 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:26Z","lastTransitionTime":"2025-12-02T11:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.930552 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.930586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.930594 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.930607 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:26 crc kubenswrapper[4721]: I1202 11:52:26.930616 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:26Z","lastTransitionTime":"2025-12-02T11:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.033425 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.033469 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.033480 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.033494 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.033505 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:27Z","lastTransitionTime":"2025-12-02T11:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.135550 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.135581 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.135593 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.135611 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.135621 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:27Z","lastTransitionTime":"2025-12-02T11:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.238132 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.238216 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.238236 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.238262 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.238287 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:27Z","lastTransitionTime":"2025-12-02T11:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.340398 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.340478 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.340496 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.340520 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.340537 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:27Z","lastTransitionTime":"2025-12-02T11:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.443399 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.443447 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.443459 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.443476 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.443489 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:27Z","lastTransitionTime":"2025-12-02T11:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.546479 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.546570 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.546602 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.546638 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.546658 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:27Z","lastTransitionTime":"2025-12-02T11:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.649415 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.649451 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.649460 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.649474 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.649485 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:27Z","lastTransitionTime":"2025-12-02T11:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.751333 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.751397 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.751407 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.751422 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.751432 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:27Z","lastTransitionTime":"2025-12-02T11:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.839096 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.839195 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:52:27 crc kubenswrapper[4721]: E1202 11:52:27.839319 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.839361 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.839373 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:27 crc kubenswrapper[4721]: E1202 11:52:27.839464 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:52:27 crc kubenswrapper[4721]: E1202 11:52:27.839898 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:52:27 crc kubenswrapper[4721]: E1202 11:52:27.839782 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.840726 4721 scope.go:117] "RemoveContainer" containerID="0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.854017 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.854058 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.854074 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.854095 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.854114 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:27Z","lastTransitionTime":"2025-12-02T11:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.959311 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.960165 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.960215 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.960511 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:27 crc kubenswrapper[4721]: I1202 11:52:27.960523 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:27Z","lastTransitionTime":"2025-12-02T11:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.062707 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.062766 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.062783 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.062807 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.062824 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:28Z","lastTransitionTime":"2025-12-02T11:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.164597 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.164656 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.164673 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.164697 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.164714 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:28Z","lastTransitionTime":"2025-12-02T11:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.246964 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovnkube-controller/2.log" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.252025 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerStarted","Data":"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938"} Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.252619 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.267175 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.267234 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.267252 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.267277 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.267297 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:28Z","lastTransitionTime":"2025-12-02T11:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.267836 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.289574 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.303282 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.320250 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.336572 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9be7d0c0-4bed-46b7-933f-d20879e184c5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1831ed7c5b757a786e658ca6a57ffa86815b41736b0d2aa32276d8311a9e8e60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0e89a391ffe89140ff679974abf51939d2852bf0c8b707e199be40fc5fc1f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fb6fb086b81deae49d00a8a09742c1fec519578ec195133c817d72e91b5fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.361666 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.370284 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.370337 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.370363 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.370378 4721 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.370389 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:28Z","lastTransitionTime":"2025-12-02T11:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.374469 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.388406 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.404000 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.416454 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f55051ff5eedef4b085c57ab10b6545bc8d2abdc70c6e15444e4e0adcd5f659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:52:18Z\\\",\\\"message\\\":\\\"2025-12-02T11:51:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288\\\\n2025-12-02T11:51:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288 to /host/opt/cni/bin/\\\\n2025-12-02T11:51:32Z [verbose] multus-daemon started\\\\n2025-12-02T11:51:32Z [verbose] Readiness Indicator file check\\\\n2025-12-02T11:52:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:52:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.426330 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.439093 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.450120 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.465860 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27
f7037db583c00b796825e938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:56Z\\\",\\\"message\\\":\\\"363 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver/apiserver]} name:Service_openshift-kube-apiserver/apiserver_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.93:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d71b38eb-32af-4c0f-9490-7c317c111e3a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:56.712367 6363 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd in node crc\\\\nI1202 11:51:56.712373 6363 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd after 0 failed attempt(s)\\\\nI1202 11:51:56.712378 6363 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-gbxxd\\\\nF1202 11:51:56.712316 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.472761 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.472800 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.472825 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.472843 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.472854 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:28Z","lastTransitionTime":"2025-12-02T11:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.476289 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.487601 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.502864 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.514252 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:28Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.575095 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.575126 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.575135 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.575148 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.575156 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:28Z","lastTransitionTime":"2025-12-02T11:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.677075 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.677110 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.677122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.677140 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.677159 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:28Z","lastTransitionTime":"2025-12-02T11:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.780031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.780075 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.780085 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.780101 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.780111 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:28Z","lastTransitionTime":"2025-12-02T11:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.882246 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.882288 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.882301 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.882316 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.882326 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:28Z","lastTransitionTime":"2025-12-02T11:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.985367 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.985415 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.985428 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.985446 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:28 crc kubenswrapper[4721]: I1202 11:52:28.985458 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:28Z","lastTransitionTime":"2025-12-02T11:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.087564 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.087620 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.087637 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.087660 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.087676 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:29Z","lastTransitionTime":"2025-12-02T11:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.190329 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.190379 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.190392 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.190412 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.190423 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:29Z","lastTransitionTime":"2025-12-02T11:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.258527 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovnkube-controller/3.log" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.259321 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovnkube-controller/2.log" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.263225 4721 generic.go:334] "Generic (PLEG): container finished" podID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerID="57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938" exitCode=1 Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.263262 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerDied","Data":"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938"} Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.263303 4721 scope.go:117] "RemoveContainer" containerID="0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.264698 4721 scope.go:117] "RemoveContainer" containerID="57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938" Dec 02 11:52:29 crc kubenswrapper[4721]: E1202 11:52:29.265013 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.288257 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.294413 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.294501 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.294515 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.294560 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.294573 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:29Z","lastTransitionTime":"2025-12-02T11:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.305222 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.323185 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.339461 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.353427 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9be7d0c0-4bed-46b7-933f-d20879e184c5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1831ed7c5b757a786e658ca6a57ffa86815b41736b0d2aa32276d8311a9e8e60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0e89a391ffe89140ff679974abf51939d2852bf0c8b707e199be40fc5fc1f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fb6fb086b81deae49d00a8a09742c1fec519578ec195133c817d72e91b5fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.376484 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.393557 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.397157 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.397179 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.397187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.397202 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.397211 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:29Z","lastTransitionTime":"2025-12-02T11:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.403779 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.416118 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.431582 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f55051ff5eedef4b085c57ab10b6545bc8d2abdc70c6e15444e4e0adcd5f659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:52:18Z\\\",\\\"message\\\":\\\"2025-12-02T11:51:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288\\\\n2025-12-02T11:51:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288 to /host/opt/cni/bin/\\\\n2025-12-02T11:51:32Z [verbose] multus-daemon started\\\\n2025-12-02T11:51:32Z [verbose] Readiness Indicator file check\\\\n2025-12-02T11:52:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:52:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.446563 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.467022 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.482735 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.492777 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.501561 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.501604 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.501614 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.501631 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.501640 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:29Z","lastTransitionTime":"2025-12-02T11:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.509848 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0983689d174a5aa7c091c0c556c3267513aaad4e56227d986a99dfb4d81d3442\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:51:56Z\\\",\\\"message\\\":\\\"363 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-kube-apiserver/apiserver]} name:Service_openshift-kube-apiserver/apiserver_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.93:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d71b38eb-32af-4c0f-9490-7c317c111e3a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 11:51:56.712367 6363 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd in node crc\\\\nI1202 11:51:56.712373 6363 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-gbxxd after 0 failed attempt(s)\\\\nI1202 11:51:56.712378 6363 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-gbxxd\\\\nF1202 11:51:56.712316 6363 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:52:29Z\\\",\\\"message\\\":\\\"oadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1202 11:52:28.723558 6756 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-cflql\\\\nI1202 11:52:28.723570 6756 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 11:52:28.723569 6756 lb_config.go:1031] Cluster endpoints for openshift-ingress-operator/metrics for network=default are: map[]\\\\nI1202 11:52:28.723576 6756 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 11:52:28.723582 6756 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nF1202 11:52:28.723586 6756 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: 
fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.519499 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.529490 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" 
for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.539854 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:29Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.603994 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.604031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.604047 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.604069 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.604085 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:29Z","lastTransitionTime":"2025-12-02T11:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.706788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.706818 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.706827 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.706841 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.706850 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:29Z","lastTransitionTime":"2025-12-02T11:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.809122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.809156 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.809164 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.809177 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.809185 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:29Z","lastTransitionTime":"2025-12-02T11:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.839792 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:29 crc kubenswrapper[4721]: E1202 11:52:29.839969 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.840239 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:52:29 crc kubenswrapper[4721]: E1202 11:52:29.840333 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.840568 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.840653 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:29 crc kubenswrapper[4721]: E1202 11:52:29.840734 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:52:29 crc kubenswrapper[4721]: E1202 11:52:29.840814 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.911314 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.911372 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.911382 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.911399 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:29 crc kubenswrapper[4721]: I1202 11:52:29.911408 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:29Z","lastTransitionTime":"2025-12-02T11:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.013870 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.013905 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.013914 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.013928 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.013938 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:30Z","lastTransitionTime":"2025-12-02T11:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.116523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.116589 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.116607 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.116630 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.116656 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:30Z","lastTransitionTime":"2025-12-02T11:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.219543 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.219598 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.219606 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.219620 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.219630 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:30Z","lastTransitionTime":"2025-12-02T11:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.268790 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovnkube-controller/3.log" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.272902 4721 scope.go:117] "RemoveContainer" containerID="57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938" Dec 02 11:52:30 crc kubenswrapper[4721]: E1202 11:52:30.273166 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.293536 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.313670 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.322299 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.322387 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.322407 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.322425 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.322470 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:30Z","lastTransitionTime":"2025-12-02T11:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.329222 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.330530 4721 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.330567 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.330581 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.330602 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.330617 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:30Z","lastTransitionTime":"2025-12-02T11:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.348515 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: E1202 11:52:30.354732 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.360221 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.360256 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.360270 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.360289 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.360303 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:30Z","lastTransitionTime":"2025-12-02T11:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.368135 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9be7d0c0-4bed-46b7-933f-d20879e184c5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1831ed7c5b757a786e658ca6a57ffa86815b41736b0d2aa32276d8311a9e8e60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0e89a391ffe89140ff679974abf51939d2852bf0c8b707e199be40fc5fc1f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c
5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fb6fb086b81deae49d00a8a09742c1fec519578ec195133c817d72e91b5fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: E1202 11:52:30.377718 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.381959 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.382002 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.382031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.382047 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.382056 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:30Z","lastTransitionTime":"2025-12-02T11:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.387788 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: E1202 11:52:30.399162 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.399189 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.402437 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.402467 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.402478 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.402492 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.402503 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:30Z","lastTransitionTime":"2025-12-02T11:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.411670 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: E1202 11:52:30.416075 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.419944 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.420026 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.420047 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.420070 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.420088 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:30Z","lastTransitionTime":"2025-12-02T11:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.426770 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: E1202 11:52:30.435207 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a
9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: E1202 11:52:30.435377 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.439305 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f55051ff5eedef4b085c57ab10b6545bc8d2abdc70c6e15444e4e0adcd5f659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:52:18Z\\\",\\\"message\\\":\\\"2025-12-02T11:51:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288\\\\n2025-12-02T11:51:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288 to /host/opt/cni/bin/\\\\n2025-12-02T11:51:32Z [verbose] multus-daemon started\\\\n2025-12-02T11:51:32Z [verbose] Readiness Indicator file check\\\\n2025-12-02T11:52:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:52:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.440540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.440572 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.440583 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.440601 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.440614 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:30Z","lastTransitionTime":"2025-12-02T11:52:30Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.449922 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.463280 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.474183 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.484315 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.503119 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27
f7037db583c00b796825e938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:52:29Z\\\",\\\"message\\\":\\\"oadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1202 11:52:28.723558 6756 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-cflql\\\\nI1202 11:52:28.723570 6756 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 11:52:28.723569 6756 lb_config.go:1031] Cluster endpoints for openshift-ingress-operator/metrics for network=default are: map[]\\\\nI1202 11:52:28.723576 6756 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 11:52:28.723582 6756 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nF1202 11:52:28.723586 6756 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:52:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.512878 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.523017 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.532672 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.542996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.543027 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.543036 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.543066 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.543076 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:30Z","lastTransitionTime":"2025-12-02T11:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.645568 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.645601 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.645611 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.645626 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.645637 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:30Z","lastTransitionTime":"2025-12-02T11:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.747878 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.747937 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.747950 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.747964 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.747994 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:30Z","lastTransitionTime":"2025-12-02T11:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.850009 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.850090 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.850112 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.850137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.850160 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:30Z","lastTransitionTime":"2025-12-02T11:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.861153 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.876412 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.895756 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.931604 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9
864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.951008 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f5407555
8ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.953095 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.953131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.953141 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.953154 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.953165 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:30Z","lastTransitionTime":"2025-12-02T11:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.965485 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.976152 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 
2025-08-24T17:21:41Z" Dec 02 11:52:30 crc kubenswrapper[4721]: I1202 11:52:30.990509 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:30Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.008576 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9be7d0c0-4bed-46b7-933f-d20879e184c5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1831ed7c5b757a786e658ca6a57ffa86815b41736b0d2aa32276d8311a9e8e60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0e89a391ffe89140ff679974abf51939d2852bf0c8b707e199be40fc5fc1f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fb6fb086b81deae49d00a8a09742c1fec519578ec195133c817d72e91b5fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.023994 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.042836 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.055902 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.058007 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.058049 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.058059 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.058074 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.058086 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:31Z","lastTransitionTime":"2025-12-02T11:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.076035 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.094461 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f55051ff5eedef4b085c57ab10b6545bc8d2abdc70c6e15444e4e0adcd5f659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:52:18Z\\\",\\\"message\\\":\\\"2025-12-02T11:51:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288\\\\n2025-12-02T11:51:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288 to /host/opt/cni/bin/\\\\n2025-12-02T11:51:32Z [verbose] multus-daemon started\\\\n2025-12-02T11:51:32Z [verbose] Readiness Indicator file check\\\\n2025-12-02T11:52:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:52:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.110425 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.130666 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.145241 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:31Z is after 2025-08-24T17:21:41Z" Dec 02 
11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.160852 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.160894 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.160903 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.160916 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.160926 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:31Z","lastTransitionTime":"2025-12-02T11:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.164078 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27
f7037db583c00b796825e938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:52:29Z\\\",\\\"message\\\":\\\"oadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1202 11:52:28.723558 6756 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-cflql\\\\nI1202 11:52:28.723570 6756 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 11:52:28.723569 6756 lb_config.go:1031] Cluster endpoints for openshift-ingress-operator/metrics for network=default are: map[]\\\\nI1202 11:52:28.723576 6756 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 11:52:28.723582 6756 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nF1202 11:52:28.723586 6756 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:52:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:31Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.263555 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.263582 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.263589 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.263602 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.263629 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:31Z","lastTransitionTime":"2025-12-02T11:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.366374 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.366439 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.366463 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.366495 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.366517 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:31Z","lastTransitionTime":"2025-12-02T11:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.469064 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.469108 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.469117 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.469131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.469142 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:31Z","lastTransitionTime":"2025-12-02T11:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.572576 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.572636 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.572656 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.572680 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.572697 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:31Z","lastTransitionTime":"2025-12-02T11:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.675775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.675844 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.675861 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.675886 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.675904 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:31Z","lastTransitionTime":"2025-12-02T11:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.778686 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.778813 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.778838 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.778870 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.778891 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:31Z","lastTransitionTime":"2025-12-02T11:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.839760 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.839797 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.839786 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:31 crc kubenswrapper[4721]: E1202 11:52:31.839920 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.839737 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:31 crc kubenswrapper[4721]: E1202 11:52:31.840068 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:31 crc kubenswrapper[4721]: E1202 11:52:31.840143 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:52:31 crc kubenswrapper[4721]: E1202 11:52:31.840266 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.881761 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.881834 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.881855 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.881884 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.881906 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:31Z","lastTransitionTime":"2025-12-02T11:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.985011 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.985130 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.985152 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.985175 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:31 crc kubenswrapper[4721]: I1202 11:52:31.985192 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:31Z","lastTransitionTime":"2025-12-02T11:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.088488 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.088587 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.088605 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.088630 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.088647 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:32Z","lastTransitionTime":"2025-12-02T11:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.196367 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.196434 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.196474 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.196507 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.196533 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:32Z","lastTransitionTime":"2025-12-02T11:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.299564 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.299642 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.299665 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.299695 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.299715 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:32Z","lastTransitionTime":"2025-12-02T11:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.403457 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.403526 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.403549 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.403579 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.403602 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:32Z","lastTransitionTime":"2025-12-02T11:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.506206 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.506254 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.506270 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.506292 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.506310 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:32Z","lastTransitionTime":"2025-12-02T11:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.609380 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.609461 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.609486 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.609518 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.609542 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:32Z","lastTransitionTime":"2025-12-02T11:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.712689 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.712754 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.712775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.712800 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.712817 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:32Z","lastTransitionTime":"2025-12-02T11:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.815974 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.816181 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.816203 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.816228 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.816246 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:32Z","lastTransitionTime":"2025-12-02T11:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.920390 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.920437 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.920452 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.920473 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:32 crc kubenswrapper[4721]: I1202 11:52:32.920491 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:32Z","lastTransitionTime":"2025-12-02T11:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.023254 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.023331 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.023426 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.023453 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.023472 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:33Z","lastTransitionTime":"2025-12-02T11:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.126722 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.126774 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.126791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.126817 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.126834 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:33Z","lastTransitionTime":"2025-12-02T11:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.230247 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.230389 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.230415 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.230445 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.230470 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:33Z","lastTransitionTime":"2025-12-02T11:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.334128 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.334569 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.334726 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.334881 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.335105 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:33Z","lastTransitionTime":"2025-12-02T11:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.438208 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.438560 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.438707 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.438850 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.438983 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:33Z","lastTransitionTime":"2025-12-02T11:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.542716 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.542766 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.542784 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.542809 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.542826 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:33Z","lastTransitionTime":"2025-12-02T11:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.646324 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.646413 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.646442 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.646473 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.646498 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:33Z","lastTransitionTime":"2025-12-02T11:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.715458 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.715679 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:37.715641295 +0000 UTC m=+147.454191684 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.715767 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.715829 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.715890 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.715971 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.716057 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.716126 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.716156 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 11:53:37.716129621 +0000 UTC m=+147.454680010 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.716157 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.716188 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 11:53:37.716174462 +0000 UTC m=+147.454724851 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.716204 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.716229 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.716260 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.716279 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.716297 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.716303 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 11:53:37.716279426 +0000 UTC m=+147.454829815 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.716411 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 11:53:37.716327767 +0000 UTC m=+147.454878156 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.749492 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.749550 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.749567 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.749590 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.749609 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:33Z","lastTransitionTime":"2025-12-02T11:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.838910 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.839112 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.839465 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.839592 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.839675 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.839681 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.839860 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:52:33 crc kubenswrapper[4721]: E1202 11:52:33.839936 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.851822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.851874 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.851891 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.851915 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.851933 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:33Z","lastTransitionTime":"2025-12-02T11:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.954659 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.954698 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.954714 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.954738 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:33 crc kubenswrapper[4721]: I1202 11:52:33.954754 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:33Z","lastTransitionTime":"2025-12-02T11:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.057893 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.058044 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.058075 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.058109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.058132 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:34Z","lastTransitionTime":"2025-12-02T11:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.161512 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.161577 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.161594 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.161619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.161639 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:34Z","lastTransitionTime":"2025-12-02T11:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.264245 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.264301 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.264316 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.264337 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.264372 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:34Z","lastTransitionTime":"2025-12-02T11:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.367558 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.367668 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.367690 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.367720 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.367743 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:34Z","lastTransitionTime":"2025-12-02T11:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.470275 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.470619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.470635 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.470655 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.470671 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:34Z","lastTransitionTime":"2025-12-02T11:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.574024 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.574091 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.574106 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.574122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.574133 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:34Z","lastTransitionTime":"2025-12-02T11:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.677985 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.678190 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.678223 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.678252 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.678270 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:34Z","lastTransitionTime":"2025-12-02T11:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.781513 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.781556 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.781567 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.781587 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.781598 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:34Z","lastTransitionTime":"2025-12-02T11:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.877067 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.884958 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.885030 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.885049 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.885075 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.885095 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:34Z","lastTransitionTime":"2025-12-02T11:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.988958 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.989434 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.989580 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.989711 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:34 crc kubenswrapper[4721]: I1202 11:52:34.989864 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:34Z","lastTransitionTime":"2025-12-02T11:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.093446 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.093521 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.093541 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.093582 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.093605 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:35Z","lastTransitionTime":"2025-12-02T11:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.197166 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.197217 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.197228 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.197247 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.197261 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:35Z","lastTransitionTime":"2025-12-02T11:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.300653 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.300719 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.300734 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.300757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.300777 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:35Z","lastTransitionTime":"2025-12-02T11:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.403685 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.403745 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.403762 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.403785 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.403803 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:35Z","lastTransitionTime":"2025-12-02T11:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.507215 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.507285 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.507303 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.507331 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.507384 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:35Z","lastTransitionTime":"2025-12-02T11:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.610940 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.611036 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.611055 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.611088 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.611107 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:35Z","lastTransitionTime":"2025-12-02T11:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.714972 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.715058 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.715088 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.715118 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.715142 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:35Z","lastTransitionTime":"2025-12-02T11:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.818726 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.818803 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.818825 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.818852 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.818873 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:35Z","lastTransitionTime":"2025-12-02T11:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.839898 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.840010 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.839912 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.839899 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:35 crc kubenswrapper[4721]: E1202 11:52:35.840184 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:52:35 crc kubenswrapper[4721]: E1202 11:52:35.840415 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:52:35 crc kubenswrapper[4721]: E1202 11:52:35.840481 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:52:35 crc kubenswrapper[4721]: E1202 11:52:35.840548 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.922552 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.922601 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.922612 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.922630 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:35 crc kubenswrapper[4721]: I1202 11:52:35.922643 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:35Z","lastTransitionTime":"2025-12-02T11:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 11:52:36 crc kubenswrapper[4721]: I1202 11:52:36.025423 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:36 crc kubenswrapper[4721]: I1202 11:52:36.025509 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:36 crc kubenswrapper[4721]: I1202 11:52:36.025528 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:36 crc kubenswrapper[4721]: I1202 11:52:36.025562 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:36 crc kubenswrapper[4721]: I1202 11:52:36.025585 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:36Z","lastTransitionTime":"2025-12-02T11:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:37 crc kubenswrapper[4721]: I1202 11:52:37.061916 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:37 crc kubenswrapper[4721]: I1202 11:52:37.062004 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:37 crc kubenswrapper[4721]: I1202 11:52:37.062024 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:37 crc kubenswrapper[4721]: I1202 11:52:37.062061 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:37 crc kubenswrapper[4721]: I1202 11:52:37.062097 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:37Z","lastTransitionTime":"2025-12-02T11:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:37 crc kubenswrapper[4721]: I1202 11:52:37.838996 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:37 crc kubenswrapper[4721]: I1202 11:52:37.839103 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:37 crc kubenswrapper[4721]: I1202 11:52:37.839134 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:37 crc kubenswrapper[4721]: E1202 11:52:37.839952 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:37 crc kubenswrapper[4721]: E1202 11:52:37.839994 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:52:37 crc kubenswrapper[4721]: I1202 11:52:37.839168 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:37 crc kubenswrapper[4721]: E1202 11:52:37.840220 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:52:37 crc kubenswrapper[4721]: E1202 11:52:37.840504 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:52:38 crc kubenswrapper[4721]: I1202 11:52:38.100609 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:38 crc kubenswrapper[4721]: I1202 11:52:38.100691 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:38 crc kubenswrapper[4721]: I1202 11:52:38.100710 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:38 crc kubenswrapper[4721]: I1202 11:52:38.100740 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:38 crc kubenswrapper[4721]: I1202 11:52:38.100851 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:38Z","lastTransitionTime":"2025-12-02T11:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:39 crc kubenswrapper[4721]: I1202 11:52:39.031518 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:39 crc kubenswrapper[4721]: I1202 11:52:39.031563 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:39 crc kubenswrapper[4721]: I1202 11:52:39.031575 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:39 crc kubenswrapper[4721]: I1202 11:52:39.031592 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:39 crc kubenswrapper[4721]: I1202 11:52:39.031604 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:39Z","lastTransitionTime":"2025-12-02T11:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:39 crc kubenswrapper[4721]: I1202 11:52:39.839057 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:39 crc kubenswrapper[4721]: I1202 11:52:39.839101 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:39 crc kubenswrapper[4721]: I1202 11:52:39.839106 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:39 crc kubenswrapper[4721]: E1202 11:52:39.839166 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:52:39 crc kubenswrapper[4721]: I1202 11:52:39.839074 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:39 crc kubenswrapper[4721]: E1202 11:52:39.839326 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:39 crc kubenswrapper[4721]: E1202 11:52:39.839370 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:52:39 crc kubenswrapper[4721]: E1202 11:52:39.839427 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Has your network provider started?"} Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.575066 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.575587 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.575771 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.575939 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.576118 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:40Z","lastTransitionTime":"2025-12-02T11:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.649221 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.649493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.649649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.649804 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.649936 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:40Z","lastTransitionTime":"2025-12-02T11:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:40 crc kubenswrapper[4721]: E1202 11:52:40.670370 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.675177 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.675238 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.675261 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.675285 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.675300 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:40Z","lastTransitionTime":"2025-12-02T11:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.706418 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.706709 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.706875 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.707059 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.707222 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:40Z","lastTransitionTime":"2025-12-02T11:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.731290 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.731443 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.731538 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.731637 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.731724 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:40Z","lastTransitionTime":"2025-12-02T11:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.746940 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.746975 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.746986 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.747005 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.747017 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:40Z","lastTransitionTime":"2025-12-02T11:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:40 crc kubenswrapper[4721]: E1202 11:52:40.758367 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:40 crc kubenswrapper[4721]: E1202 11:52:40.758541 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.760238 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.760276 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.760291 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.760310 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.760323 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:40Z","lastTransitionTime":"2025-12-02T11:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.861426 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.862723 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 
11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.862760 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.862772 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.862790 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.862801 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:40Z","lastTransitionTime":"2025-12-02T11:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.878379 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9be7d0c0-4bed-46b7-933f-d20879e184c5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1831ed7c5b757a786e658ca6a57ffa86815b41736b0d2aa32276d8311a9e8e60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0e89a391ffe89140ff679974abf51939d2852bf0c8b707e199be40fc5fc1f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kub
ernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fb6fb086b81deae49d00a8a09742c1fec519578ec195133c817d72e91b5fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.896765 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9
864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.912031 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f5407555
8ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.926041 4721 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.937502 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.949633 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.963407 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.964825 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.964854 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.964865 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.964881 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.964892 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:40Z","lastTransitionTime":"2025-12-02T11:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.980056 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:40 crc kubenswrapper[4721]: I1202 11:52:40.991632 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e19ff7f9-72ca-4b66-8927-171e75de7e80\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68aedde4e421e308f6547d96dca1464131e81f2715bde922721b3122f5c41eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ea00657786746027504e48917155e9f6dc53c1d733db0a19c6f2cdbbb0310cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ea00657786746027504e48917155e9f6dc53c1d733db0a19c6f2cdbbb0310cd\\\",\\\"ex
itCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:40Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.004949 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"
cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:41Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.016754 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:41Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.026393 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:41Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.037289 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:41Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.049624 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f55051ff5eedef4b085c57ab10b6545bc8d2abdc70c6e15444e4e0adcd5f659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:52:18Z\\\",\\\"message\\\":\\\"2025-12-02T11:51:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288\\\\n2025-12-02T11:51:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288 to /host/opt/cni/bin/\\\\n2025-12-02T11:51:32Z [verbose] multus-daemon started\\\\n2025-12-02T11:51:32Z [verbose] Readiness Indicator file check\\\\n2025-12-02T11:52:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:52:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:41Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.066789 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.066854 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.066865 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.066881 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.066893 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:41Z","lastTransitionTime":"2025-12-02T11:52:41Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.071823 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:52:29Z\\\",\\\"message\\\":\\\"oadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1202 11:52:28.723558 6756 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-cflql\\\\nI1202 11:52:28.723570 6756 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 11:52:28.723569 6756 lb_config.go:1031] Cluster endpoints for openshift-ingress-operator/metrics for network=default are: map[]\\\\nI1202 11:52:28.723576 6756 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 11:52:28.723582 6756 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nF1202 11:52:28.723586 6756 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: 
fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:52:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:41Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.083584 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:41Z is after 2025-08-24T17:21:41Z" Dec 02 
11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.101099 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:41Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.120865 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:41Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.169564 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.169599 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.169610 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.169626 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.169637 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:41Z","lastTransitionTime":"2025-12-02T11:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.271788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.271860 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.271883 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.271959 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.271986 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:41Z","lastTransitionTime":"2025-12-02T11:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.374682 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.374748 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.374765 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.374788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.374806 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:41Z","lastTransitionTime":"2025-12-02T11:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.477735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.477773 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.477785 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.477800 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.477812 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:41Z","lastTransitionTime":"2025-12-02T11:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.581171 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.581231 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.581249 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.581269 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.581282 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:41Z","lastTransitionTime":"2025-12-02T11:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.684219 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.684280 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.684299 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.684323 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.684364 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:41Z","lastTransitionTime":"2025-12-02T11:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.788202 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.788256 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.788271 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.788291 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.788304 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:41Z","lastTransitionTime":"2025-12-02T11:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.839551 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.839577 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.839710 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:41 crc kubenswrapper[4721]: E1202 11:52:41.839927 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.839959 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:41 crc kubenswrapper[4721]: E1202 11:52:41.840068 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:52:41 crc kubenswrapper[4721]: E1202 11:52:41.840203 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:41 crc kubenswrapper[4721]: E1202 11:52:41.840490 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.891112 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.891182 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.891200 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.891223 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.891241 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:41Z","lastTransitionTime":"2025-12-02T11:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.993456 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.993519 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.993537 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.993562 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:41 crc kubenswrapper[4721]: I1202 11:52:41.993585 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:41Z","lastTransitionTime":"2025-12-02T11:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.096922 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.096986 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.096999 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.097065 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.097078 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:42Z","lastTransitionTime":"2025-12-02T11:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.199600 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.199671 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.199688 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.199713 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.199732 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:42Z","lastTransitionTime":"2025-12-02T11:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.303776 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.303810 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.303820 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.303836 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.303849 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:42Z","lastTransitionTime":"2025-12-02T11:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.406905 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.406969 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.406994 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.407023 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.407043 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:42Z","lastTransitionTime":"2025-12-02T11:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.525123 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.525158 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.525168 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.525182 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.525191 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:42Z","lastTransitionTime":"2025-12-02T11:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.626999 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.627059 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.627089 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.627110 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.627123 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:42Z","lastTransitionTime":"2025-12-02T11:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.729280 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.729315 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.729323 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.729337 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.729367 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:42Z","lastTransitionTime":"2025-12-02T11:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.831895 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.831946 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.831962 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.831984 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.832001 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:42Z","lastTransitionTime":"2025-12-02T11:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.934480 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.934512 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.934520 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.934533 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:42 crc kubenswrapper[4721]: I1202 11:52:42.934543 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:42Z","lastTransitionTime":"2025-12-02T11:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.037224 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.037271 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.037287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.037307 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.037323 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:43Z","lastTransitionTime":"2025-12-02T11:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.139394 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.139435 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.139448 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.139464 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.139476 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:43Z","lastTransitionTime":"2025-12-02T11:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.242007 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.242094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.242120 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.242151 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.242178 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:43Z","lastTransitionTime":"2025-12-02T11:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.347085 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.347137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.347154 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.347173 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.347184 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:43Z","lastTransitionTime":"2025-12-02T11:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.449664 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.449710 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.449721 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.449740 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.449752 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:43Z","lastTransitionTime":"2025-12-02T11:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.552086 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.552169 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.552193 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.552223 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.552248 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:43Z","lastTransitionTime":"2025-12-02T11:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.655556 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.655619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.655638 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.655666 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.655704 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:43Z","lastTransitionTime":"2025-12-02T11:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.758505 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.758546 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.758555 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.758570 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.758579 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:43Z","lastTransitionTime":"2025-12-02T11:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.839674 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.839722 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.839734 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.839699 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:43 crc kubenswrapper[4721]: E1202 11:52:43.839907 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:43 crc kubenswrapper[4721]: E1202 11:52:43.839994 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:52:43 crc kubenswrapper[4721]: E1202 11:52:43.841092 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:52:43 crc kubenswrapper[4721]: E1202 11:52:43.841168 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.841628 4721 scope.go:117] "RemoveContainer" containerID="57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938"
Dec 02 11:52:43 crc kubenswrapper[4721]: E1202 11:52:43.841896 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.861271 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.861461 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.861482 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.861507 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.861523 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:43Z","lastTransitionTime":"2025-12-02T11:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.963991 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.964060 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.964069 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.964083 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:43 crc kubenswrapper[4721]: I1202 11:52:43.964092 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:43Z","lastTransitionTime":"2025-12-02T11:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.066243 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.066283 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.066292 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.066306 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.066314 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:44Z","lastTransitionTime":"2025-12-02T11:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.169662 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.169703 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.169716 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.169733 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.169745 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:44Z","lastTransitionTime":"2025-12-02T11:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.272694 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.272787 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.272807 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.272841 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.272867 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:44Z","lastTransitionTime":"2025-12-02T11:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.375136 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.375176 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.375184 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.375196 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.375204 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:44Z","lastTransitionTime":"2025-12-02T11:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.477170 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.477228 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.477249 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.477279 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.477300 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:44Z","lastTransitionTime":"2025-12-02T11:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.580329 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.580444 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.580468 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.580497 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.580517 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:44Z","lastTransitionTime":"2025-12-02T11:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.683892 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.683952 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.683971 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.683996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.684012 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:44Z","lastTransitionTime":"2025-12-02T11:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.786977 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.787031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.787042 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.787059 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.787069 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:44Z","lastTransitionTime":"2025-12-02T11:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.889382 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.889428 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.889439 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.889455 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.889470 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:44Z","lastTransitionTime":"2025-12-02T11:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.992285 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.992327 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.992337 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.992381 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:44 crc kubenswrapper[4721]: I1202 11:52:44.992394 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:44Z","lastTransitionTime":"2025-12-02T11:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.094878 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.094921 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.094931 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.094949 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.094961 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:45Z","lastTransitionTime":"2025-12-02T11:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.197534 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.197586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.197600 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.197616 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.197628 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:45Z","lastTransitionTime":"2025-12-02T11:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.300875 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.300920 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.300928 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.300944 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.300954 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:45Z","lastTransitionTime":"2025-12-02T11:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.403656 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.403737 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.403769 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.403797 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.403817 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:45Z","lastTransitionTime":"2025-12-02T11:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.506883 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.506950 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.506967 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.506993 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.507012 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:45Z","lastTransitionTime":"2025-12-02T11:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.609998 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.610066 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.610084 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.610117 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.610134 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:45Z","lastTransitionTime":"2025-12-02T11:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.716049 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.716156 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.716193 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.716229 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.716266 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:45Z","lastTransitionTime":"2025-12-02T11:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.819888 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.819953 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.819965 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.819984 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.819998 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:45Z","lastTransitionTime":"2025-12-02T11:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.839584 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.839632 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.839609 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.839737 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:45 crc kubenswrapper[4721]: E1202 11:52:45.839847 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:52:45 crc kubenswrapper[4721]: E1202 11:52:45.839996 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:45 crc kubenswrapper[4721]: E1202 11:52:45.840459 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:52:45 crc kubenswrapper[4721]: E1202 11:52:45.840573 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.924138 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.924183 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.924197 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.924217 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:45 crc kubenswrapper[4721]: I1202 11:52:45.924232 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:45Z","lastTransitionTime":"2025-12-02T11:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.027822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.027880 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.027889 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.027906 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.027915 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:46Z","lastTransitionTime":"2025-12-02T11:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.130072 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.130106 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.130115 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.130126 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.130134 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:46Z","lastTransitionTime":"2025-12-02T11:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.232139 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.232204 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.232221 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.232244 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.232262 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:46Z","lastTransitionTime":"2025-12-02T11:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.334548 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.334622 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.334649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.334675 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.334694 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:46Z","lastTransitionTime":"2025-12-02T11:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.437863 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.437921 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.437937 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.437959 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.437977 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:46Z","lastTransitionTime":"2025-12-02T11:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.539895 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.539952 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.539972 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.539995 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.540013 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:46Z","lastTransitionTime":"2025-12-02T11:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.643479 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.643544 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.643561 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.643585 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.643605 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:46Z","lastTransitionTime":"2025-12-02T11:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.746597 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.746664 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.746689 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.746718 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.746738 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:46Z","lastTransitionTime":"2025-12-02T11:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.850296 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.850379 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.850397 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.850414 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.850425 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:46Z","lastTransitionTime":"2025-12-02T11:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.953820 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.953881 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.953898 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.953926 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:46 crc kubenswrapper[4721]: I1202 11:52:46.953941 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:46Z","lastTransitionTime":"2025-12-02T11:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.056642 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.056697 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.056711 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.056730 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.056740 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:47Z","lastTransitionTime":"2025-12-02T11:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.159786 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.159835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.159844 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.159864 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.159882 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:47Z","lastTransitionTime":"2025-12-02T11:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.261742 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.261789 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.261799 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.261813 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.261822 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:47Z","lastTransitionTime":"2025-12-02T11:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.364120 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.364166 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.364176 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.364192 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.364201 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:47Z","lastTransitionTime":"2025-12-02T11:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.466662 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.466741 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.466761 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.466781 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.466795 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:47Z","lastTransitionTime":"2025-12-02T11:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.572567 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.572610 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.572619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.572635 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.572644 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:47Z","lastTransitionTime":"2025-12-02T11:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.674986 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.675024 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.675033 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.675050 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.675058 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:47Z","lastTransitionTime":"2025-12-02T11:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.777989 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.778057 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.778075 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.778102 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.778120 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:47Z","lastTransitionTime":"2025-12-02T11:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.839379 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.839655 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.839685 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.839750 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:52:47 crc kubenswrapper[4721]: E1202 11:52:47.839915 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:52:47 crc kubenswrapper[4721]: E1202 11:52:47.840017 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:52:47 crc kubenswrapper[4721]: E1202 11:52:47.840112 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:52:47 crc kubenswrapper[4721]: E1202 11:52:47.840226 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.880962 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.881043 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.881062 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.881086 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.881109 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:47Z","lastTransitionTime":"2025-12-02T11:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.984601 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.984653 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.984666 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.984683 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:47 crc kubenswrapper[4721]: I1202 11:52:47.984695 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:47Z","lastTransitionTime":"2025-12-02T11:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.088168 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.088222 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.088244 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.088263 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.088290 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:48Z","lastTransitionTime":"2025-12-02T11:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.190835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.190880 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.190895 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.190916 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.190931 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:48Z","lastTransitionTime":"2025-12-02T11:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.293663 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.293755 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.293775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.293799 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.293818 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:48Z","lastTransitionTime":"2025-12-02T11:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.396662 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.396747 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.396761 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.396778 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.396791 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:48Z","lastTransitionTime":"2025-12-02T11:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.501506 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.501550 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.501564 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.501582 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.501597 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:48Z","lastTransitionTime":"2025-12-02T11:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.609994 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.610039 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.610048 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.610062 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.610070 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:48Z","lastTransitionTime":"2025-12-02T11:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.634595 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs\") pod \"network-metrics-daemon-qf9jp\" (UID: \"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\") " pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:48 crc kubenswrapper[4721]: E1202 11:52:48.634749 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 02 11:52:48 crc kubenswrapper[4721]: E1202 11:52:48.634807 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs podName:c1b39d16-c46b-40dc-a2a6-600f96eb93f7 nodeName:}" failed. No retries permitted until 2025-12-02 11:53:52.63479187 +0000 UTC m=+162.373342229 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs") pod "network-metrics-daemon-qf9jp" (UID: "c1b39d16-c46b-40dc-a2a6-600f96eb93f7") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.712890 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.712947 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.712962 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.712984 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.713002 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:48Z","lastTransitionTime":"2025-12-02T11:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.815267 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.815332 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.815377 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.815409 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.815425 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:48Z","lastTransitionTime":"2025-12-02T11:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.917699 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.917742 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.917753 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.917768 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:48 crc kubenswrapper[4721]: I1202 11:52:48.917779 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:48Z","lastTransitionTime":"2025-12-02T11:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.020079 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.020112 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.020124 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.020159 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.020169 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:49Z","lastTransitionTime":"2025-12-02T11:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.123136 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.123210 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.123234 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.123266 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.123287 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:49Z","lastTransitionTime":"2025-12-02T11:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.226397 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.226452 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.226473 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.226499 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.226517 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:49Z","lastTransitionTime":"2025-12-02T11:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.329811 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.329861 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.329876 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.329896 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.329913 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:49Z","lastTransitionTime":"2025-12-02T11:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.432736 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.432769 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.432779 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.432792 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.432804 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:49Z","lastTransitionTime":"2025-12-02T11:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.535652 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.535708 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.535720 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.535738 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.535751 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:49Z","lastTransitionTime":"2025-12-02T11:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.638942 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.639289 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.639506 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.639685 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.639841 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:49Z","lastTransitionTime":"2025-12-02T11:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.742117 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.742148 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.742159 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.742173 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.742185 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:49Z","lastTransitionTime":"2025-12-02T11:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.839460 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.839545 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.839667 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:52:49 crc kubenswrapper[4721]: E1202 11:52:49.839654 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:52:49 crc kubenswrapper[4721]: E1202 11:52:49.839792 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:52:49 crc kubenswrapper[4721]: E1202 11:52:49.839908 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.839493 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:49 crc kubenswrapper[4721]: E1202 11:52:49.840669 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.845225 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.845508 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.845701 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.845889 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.846107 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:49Z","lastTransitionTime":"2025-12-02T11:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.949223 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.949286 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.949302 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.949326 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:49 crc kubenswrapper[4721]: I1202 11:52:49.949368 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:49Z","lastTransitionTime":"2025-12-02T11:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.074044 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.074501 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.074686 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.074844 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.074989 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:50Z","lastTransitionTime":"2025-12-02T11:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.177668 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.177706 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.177717 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.177732 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.177743 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:50Z","lastTransitionTime":"2025-12-02T11:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.281068 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.281134 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.281158 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.281185 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.281207 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:50Z","lastTransitionTime":"2025-12-02T11:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.383507 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.383555 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.383567 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.383584 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.383597 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:50Z","lastTransitionTime":"2025-12-02T11:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.486075 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.486480 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.486690 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.486902 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.487183 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:50Z","lastTransitionTime":"2025-12-02T11:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.589848 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.589907 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.589926 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.589952 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.589969 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:50Z","lastTransitionTime":"2025-12-02T11:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.692757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.692826 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.692864 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.692895 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.692917 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:50Z","lastTransitionTime":"2025-12-02T11:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.795220 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.795607 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.795865 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.795949 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.796035 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:50Z","lastTransitionTime":"2025-12-02T11:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.862074 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.876449 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mwc5f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4a25fcd-b096-4c52-9072-d7d7c073adb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ee63a604d5e89669ad6d8c0e77fd5b1f42d28fdf00638f3e16e131534bcc5a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8phqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:29Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mwc5f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.894033 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.897039 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.897066 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.897077 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.897094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.897105 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:50Z","lastTransitionTime":"2025-12-02T11:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.907626 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jzz4m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3becf918-fa56-4f68-9963-b19fb91d4343\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f55051ff5eedef4b085c57ab10b6545bc8d2abdc70c6e15444e4e0adcd5f659\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:52:18Z\\\",\\\"message\\\":\\\"2025-12-02T11:51:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288\\\\n2025-12-02T11:51:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_5dabad82-8c3a-4b20-9112-a3bba384b288 to /host/opt/cni/bin/\\\\n2025-12-02T11:51:32Z [verbose] multus-daemon started\\\\n2025-12-02T11:51:32Z [verbose] Readiness Indicator file check\\\\n2025-12-02T11:52:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:52:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbxk8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jzz4m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:50 crc kubenswrapper[4721]: E1202 11:52:50.908816 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.912094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.912124 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.912135 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.912149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.912162 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:50Z","lastTransitionTime":"2025-12-02T11:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.920150 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"141842b6-fd0c-48e6-9795-a627e6f96bd4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57d99d56b5f8b233d37325b47bc0eccda5d93e37006b6e01f52ad98964a3d53b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blrdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mcr6s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:50 crc kubenswrapper[4721]: E1202 11:52:50.922556 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.925180 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.925299 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.925409 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.925523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.925611 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:50Z","lastTransitionTime":"2025-12-02T11:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.935953 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cflql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8aa4e714-4184-4fad-ad5c-d7bbf1575cb7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d080fba3e6836b969d692aba325a32aaa82bdd9b6a28e5d893c334bed0568748\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://5047c53c52f96dbb51416665c25d3d80fc836d41340f571a755e640cb1345040\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783167923ad3bc094a7285f7329049fced2045953f99efa60c02d88f0b0598b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://247c82d31f90a6ed5c9f4909b038029363903334680c423de37b679d807ff3a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dc
d771951b7f134129585bd75636565\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19a2f7eddabf5d7afc53d6c9d8a0151a1dcd771951b7f134129585bd75636565\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ccabd623f8df811ad16fcb1f31d35379fd58b1224278a59f0d7eb12d30c555e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ecae62bac1da9b81c3c82e0ed438b2a937f6cc22422939c15c0261ac3fc5c3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc4xk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cflql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:50 crc kubenswrapper[4721]: E1202 11:52:50.936552 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.940828 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.940862 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.940874 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.940891 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.940902 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:50Z","lastTransitionTime":"2025-12-02T11:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.945091 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e19ff7f9-72ca-4b66-8927-171e75de7e80\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68aedde4e421e308f6547d96dca1464131e81f2715bde922721b3122f5c41eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8ea00657786746027504e48917155e9f6dc53c1d733db0a19c6f2cdbbb0310cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ea00657786746027504e48917155e9f6dc53c1d733db0a19c6f2cdbbb0310cd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02
T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:50 crc kubenswrapper[4721]: E1202 11:52:50.951931 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.954982 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.955014 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.955024 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.955055 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.955064 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:50Z","lastTransitionTime":"2025-12-02T11:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.957598 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5cc7cb4-fabf-4983-afac-501278682319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f5b0edecd295f440b7b5669f9d896eef030c298041e0de190033ff17849b887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fdd899770959cab3ae285d2d61d5f7ae85f79461e3bd7822b30c9d4d07e0d207\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e0a34666e718e652242da2e6a75288545fe4236a70a00f4959f452066e1df61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:50 crc kubenswrapper[4721]: E1202 11:52:50.965237 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"09bbbf15-d1fe-4d39-8a50-9880fa13aa25\\\",\\\"systemUUID\\\":\\\"a
9e321cc-1477-4a2a-bfd1-fa3938fe51c8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:50 crc kubenswrapper[4721]: E1202 11:52:50.965574 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.967391 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.967412 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.967423 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.967438 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.967449 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:50Z","lastTransitionTime":"2025-12-02T11:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.976292 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2a903a9-d920-48ed-8450-fa2a4fdb458b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27
f7037db583c00b796825e938\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T11:52:29Z\\\",\\\"message\\\":\\\"oadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1202 11:52:28.723558 6756 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-cflql\\\\nI1202 11:52:28.723570 6756 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 11:52:28.723569 6756 lb_config.go:1031] Cluster endpoints for openshift-ingress-operator/metrics for network=default are: map[]\\\\nI1202 11:52:28.723576 6756 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 11:52:28.723582 6756 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nF1202 11:52:28.723586 6756 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:52:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5c4m9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gbxxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.987409 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bddfc4d8-9d79-4b83-9b40-38463890e808\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23997cb093043089d5bcaf250c265456c9b5b6ac0bc52ab80b23a7264864ebfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://962a9159d7c11d17506ad7f068479425fcbf5124ef66e41aa2e60c23a8358a31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8fbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v2k4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:50 crc kubenswrapper[4721]: I1202 11:52:50.998417 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42637b2f582ce9a86cd8b35f73fb8f01895662bb6259fa1905a6de62719b4fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:50Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.010155 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:51Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.021700 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebcbff99fdb970cb02267962b6f6f09b6405a3d1798208e441912eeda9a7660b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:51Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.034120 4721 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c27f266b-3b00-44bd-8872-0dd43d8bd22f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T11:51:29Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1202 11:51:23.785477 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 11:51:23.786438 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4256692477/tls.crt::/tmp/serving-cert-4256692477/tls.key\\\\\\\"\\\\nI1202 11:51:29.024303 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 11:51:29.027029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 11:51:29.027053 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 11:51:29.027078 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 11:51:29.027086 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 11:51:29.032417 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 11:51:29.032477 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032501 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 11:51:29.032522 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 11:51:29.032541 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 11:51:29.032562 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 11:51:29.032586 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 11:51:29.032435 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 11:51:29.037676 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:51Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.045052 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c572e9d3a6486546ae5767e4dfad299e8824f07533dd6057a01370cd4ffc4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44396ab38939bf85fd868ec86dd6023ea36b53a4fea22ab5e25648bdd268456f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:51Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.053810 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-d9hjh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eca136da-2294-4180-9a48-b4ac2547ed50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f34b55e2cc45d8c3f3a4ab70ae99295c3d3cdc1298004b40305796792b12fe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h569b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-d9hjh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:51Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.063214 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4s44b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:44Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qf9jp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:51Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.073504 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.073723 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.073900 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.074006 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.074092 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:51Z","lastTransitionTime":"2025-12-02T11:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.074839 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9be7d0c0-4bed-46b7-933f-d20879e184c5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1831ed7c5b757a786e658ca6a57ffa86815b41736b0d2aa32276d8311a9e8e60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b0e89a391ffe89140ff679974abf51939d2852bf0c8b707e199be40fc5fc1f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fb6fb086b81deae49d00a8a09742c1fec519578ec195133c817d72e91b5fdf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fbb42628caa9c23ab5e9ba95b03ab5cf41e39c2175e3fbb1871bf08437f8498b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:51Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.104033 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39d64322-b758-4b2c-b55c-baae30bf10d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T11:51:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9995dd80b46afe27f1e83d67821493dd3b26c1905e4e2a735019118a21ffa05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019
e612feeb5cd953c1a684fa40e4bb5ee6c1b984b50da63506e6c92c2412595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://56d560edfb8ce207e4402bb425f645f71ef3ac852bcb63e4063c81a868e7a459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfbc252cef3cc0c739ca28a5f81f4db164a75e9864f322b842459da738265253\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b853ef6251d500cb8c7a175a204a0bc4ec2e0a65165243034e8cbead5fca5d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T11:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7962f16f17b833de57e67b1485f4b790bdcd08edffc84357b2fe91b22efdfec1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73ca1718d9fe14bcbb468affcdec440984563a59fccee30e7487dfbfc1037b55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://731a13e9d06e25174477b3ed2f0ef2d3f46ed569f21f25a0e8e573cad65ddc0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T11:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T11:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T11:51:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T11:52:51Z is after 2025-08-24T17:21:41Z" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.176126 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.176158 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 
11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.176168 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.176182 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.176193 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:51Z","lastTransitionTime":"2025-12-02T11:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.278513 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.279425 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.279669 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.279869 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.280073 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:51Z","lastTransitionTime":"2025-12-02T11:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.381637 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.381677 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.381688 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.381704 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.381714 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:51Z","lastTransitionTime":"2025-12-02T11:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.483706 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.483758 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.483774 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.483796 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.483814 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:51Z","lastTransitionTime":"2025-12-02T11:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.586073 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.586127 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.586146 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.586169 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.586185 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:51Z","lastTransitionTime":"2025-12-02T11:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.689086 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.689162 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.689198 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.689227 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.689249 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:51Z","lastTransitionTime":"2025-12-02T11:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.792492 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.792573 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.792600 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.792630 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.792654 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:51Z","lastTransitionTime":"2025-12-02T11:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.839191 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.839299 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.839240 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.839198 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:51 crc kubenswrapper[4721]: E1202 11:52:51.839517 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:52:51 crc kubenswrapper[4721]: E1202 11:52:51.839655 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:52:51 crc kubenswrapper[4721]: E1202 11:52:51.839781 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:52:51 crc kubenswrapper[4721]: E1202 11:52:51.839996 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.896019 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.896109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.896131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.896158 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.896177 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:51Z","lastTransitionTime":"2025-12-02T11:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.999319 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.999417 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.999436 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.999462 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:51 crc kubenswrapper[4721]: I1202 11:52:51.999482 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:51Z","lastTransitionTime":"2025-12-02T11:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 11:52:53 crc kubenswrapper[4721]: I1202 11:52:53.839458 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:53 crc kubenswrapper[4721]: I1202 11:52:53.839522 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:53 crc kubenswrapper[4721]: I1202 11:52:53.839458 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:53 crc kubenswrapper[4721]: E1202 11:52:53.839698 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:52:53 crc kubenswrapper[4721]: I1202 11:52:53.840063 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:53 crc kubenswrapper[4721]: E1202 11:52:53.840089 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:53 crc kubenswrapper[4721]: E1202 11:52:53.840106 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:52:53 crc kubenswrapper[4721]: E1202 11:52:53.840194 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:52:55 crc kubenswrapper[4721]: I1202 11:52:55.838803 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:52:55 crc kubenswrapper[4721]: I1202 11:52:55.839171 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:52:55 crc kubenswrapper[4721]: I1202 11:52:55.839281 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:52:55 crc kubenswrapper[4721]: I1202 11:52:55.839172 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:52:55 crc kubenswrapper[4721]: E1202 11:52:55.839408 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:52:55 crc kubenswrapper[4721]: I1202 11:52:55.839423 4721 scope.go:117] "RemoveContainer" containerID="57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938"
Dec 02 11:52:55 crc kubenswrapper[4721]: E1202 11:52:55.839313 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:52:55 crc kubenswrapper[4721]: E1202 11:52:55.839544 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:52:55 crc kubenswrapper[4721]: E1202 11:52:55.839569 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b"
Dec 02 11:52:55 crc kubenswrapper[4721]: E1202 11:52:55.839622 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Has your network provider started?"} Dec 02 11:52:56 crc kubenswrapper[4721]: I1202 11:52:56.733785 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:56 crc kubenswrapper[4721]: I1202 11:52:56.733829 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:56 crc kubenswrapper[4721]: I1202 11:52:56.733845 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:56 crc kubenswrapper[4721]: I1202 11:52:56.733867 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:56 crc kubenswrapper[4721]: I1202 11:52:56.733883 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:56Z","lastTransitionTime":"2025-12-02T11:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:56 crc kubenswrapper[4721]: I1202 11:52:56.837116 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:56 crc kubenswrapper[4721]: I1202 11:52:56.837223 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:56 crc kubenswrapper[4721]: I1202 11:52:56.837242 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:56 crc kubenswrapper[4721]: I1202 11:52:56.837265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:56 crc kubenswrapper[4721]: I1202 11:52:56.837282 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:56Z","lastTransitionTime":"2025-12-02T11:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:56 crc kubenswrapper[4721]: I1202 11:52:56.939244 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:56 crc kubenswrapper[4721]: I1202 11:52:56.939292 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:56 crc kubenswrapper[4721]: I1202 11:52:56.939304 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:56 crc kubenswrapper[4721]: I1202 11:52:56.939362 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:56 crc kubenswrapper[4721]: I1202 11:52:56.939377 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:56Z","lastTransitionTime":"2025-12-02T11:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.042728 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.043267 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.043493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.043759 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.043993 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:57Z","lastTransitionTime":"2025-12-02T11:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.147051 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.147121 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.147140 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.147164 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.147182 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:57Z","lastTransitionTime":"2025-12-02T11:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.249814 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.249876 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.249896 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.249919 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.249938 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:57Z","lastTransitionTime":"2025-12-02T11:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.353078 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.353151 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.353175 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.353201 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.353218 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:57Z","lastTransitionTime":"2025-12-02T11:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.455745 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.455784 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.455793 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.455807 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.455817 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:57Z","lastTransitionTime":"2025-12-02T11:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.558356 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.558403 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.558416 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.558432 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.558443 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:57Z","lastTransitionTime":"2025-12-02T11:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.661011 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.661049 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.661060 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.661074 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.661082 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:57Z","lastTransitionTime":"2025-12-02T11:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.763915 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.763954 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.763963 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.763976 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.763987 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:57Z","lastTransitionTime":"2025-12-02T11:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.838758 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.838820 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.838830 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:57 crc kubenswrapper[4721]: E1202 11:52:57.838903 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:52:57 crc kubenswrapper[4721]: E1202 11:52:57.839040 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.839073 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:52:57 crc kubenswrapper[4721]: E1202 11:52:57.839219 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:52:57 crc kubenswrapper[4721]: E1202 11:52:57.839291 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.866160 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.866202 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.866215 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.866232 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.866243 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:57Z","lastTransitionTime":"2025-12-02T11:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.968917 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.968997 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.969017 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.969042 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:57 crc kubenswrapper[4721]: I1202 11:52:57.969061 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:57Z","lastTransitionTime":"2025-12-02T11:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.071640 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.071684 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.071693 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.071708 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.071717 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:58Z","lastTransitionTime":"2025-12-02T11:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.174025 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.174060 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.174068 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.174081 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.174091 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:58Z","lastTransitionTime":"2025-12-02T11:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.277148 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.277179 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.277189 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.277205 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.277218 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:58Z","lastTransitionTime":"2025-12-02T11:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.379162 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.379196 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.379207 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.379220 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.379230 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:58Z","lastTransitionTime":"2025-12-02T11:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.481640 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.481676 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.481685 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.481700 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.481708 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:58Z","lastTransitionTime":"2025-12-02T11:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.584131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.584175 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.584184 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.584199 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.584208 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:58Z","lastTransitionTime":"2025-12-02T11:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.686810 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.686871 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.686891 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.686915 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.686931 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:58Z","lastTransitionTime":"2025-12-02T11:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.789579 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.789638 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.789661 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.789690 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.789711 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:58Z","lastTransitionTime":"2025-12-02T11:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.893079 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.893131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.893166 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.893185 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.893197 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:58Z","lastTransitionTime":"2025-12-02T11:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.995542 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.995568 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.995578 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.995591 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:58 crc kubenswrapper[4721]: I1202 11:52:58.995599 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:58Z","lastTransitionTime":"2025-12-02T11:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.098449 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.098475 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.098483 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.098498 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.098506 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:59Z","lastTransitionTime":"2025-12-02T11:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.201092 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.201500 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.201719 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.201964 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.202327 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:59Z","lastTransitionTime":"2025-12-02T11:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.305350 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.305388 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.305417 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.305431 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.305440 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:59Z","lastTransitionTime":"2025-12-02T11:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.407646 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.407674 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.407682 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.407695 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.407706 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:59Z","lastTransitionTime":"2025-12-02T11:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.510287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.511220 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.511376 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.511502 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.511591 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:59Z","lastTransitionTime":"2025-12-02T11:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.614047 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.614074 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.614082 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.614094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.614103 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:59Z","lastTransitionTime":"2025-12-02T11:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.716504 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.716573 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.716596 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.716624 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.716649 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:59Z","lastTransitionTime":"2025-12-02T11:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.818946 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.818976 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.818988 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.819003 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.819013 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:59Z","lastTransitionTime":"2025-12-02T11:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.839635 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.839704 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.839761 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.839639 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 11:52:59 crc kubenswrapper[4721]: E1202 11:52:59.839861 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 11:52:59 crc kubenswrapper[4721]: E1202 11:52:59.839723 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7" Dec 02 11:52:59 crc kubenswrapper[4721]: E1202 11:52:59.840031 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 11:52:59 crc kubenswrapper[4721]: E1202 11:52:59.840140 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.921118 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.921198 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.921220 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.921249 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:52:59 crc kubenswrapper[4721]: I1202 11:52:59.921307 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:52:59Z","lastTransitionTime":"2025-12-02T11:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.023863 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.023905 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.023918 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.023936 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.023949 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:53:00Z","lastTransitionTime":"2025-12-02T11:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.126543 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.126829 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.126936 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.127046 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.127143 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:53:00Z","lastTransitionTime":"2025-12-02T11:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.230416 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.230477 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.230490 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.230507 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.230520 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:53:00Z","lastTransitionTime":"2025-12-02T11:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.332978 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.333023 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.333034 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.333051 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.333066 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:53:00Z","lastTransitionTime":"2025-12-02T11:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.434897 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.434932 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.434941 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.434955 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.434964 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:53:00Z","lastTransitionTime":"2025-12-02T11:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.536712 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.536777 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.536789 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.536805 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.536816 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:53:00Z","lastTransitionTime":"2025-12-02T11:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.638963 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.638991 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.639000 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.639012 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.639021 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:53:00Z","lastTransitionTime":"2025-12-02T11:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.741580 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.741856 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.741925 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.741995 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.742064 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:53:00Z","lastTransitionTime":"2025-12-02T11:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.844069 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.844333 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.844428 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.844534 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.844609 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:53:00Z","lastTransitionTime":"2025-12-02T11:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.884143 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=88.884110014 podStartE2EDuration="1m28.884110014s" podCreationTimestamp="2025-12-02 11:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:00.882955188 +0000 UTC m=+110.621505537" watchObservedRunningTime="2025-12-02 11:53:00.884110014 +0000 UTC m=+110.622660403"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.884546 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=57.884537667000004 podStartE2EDuration="57.884537667s" podCreationTimestamp="2025-12-02 11:52:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:00.857814253 +0000 UTC m=+110.596364602" watchObservedRunningTime="2025-12-02 11:53:00.884537667 +0000 UTC m=+110.623088056"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.900862 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=91.900845953 podStartE2EDuration="1m31.900845953s" podCreationTimestamp="2025-12-02 11:51:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:00.899811261 +0000 UTC m=+110.638361650" watchObservedRunningTime="2025-12-02 11:53:00.900845953 +0000 UTC m=+110.639396342"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.929570 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-d9hjh" podStartSLOduration=90.929545661 podStartE2EDuration="1m30.929545661s" podCreationTimestamp="2025-12-02 11:51:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:00.928330532 +0000 UTC m=+110.666880951" watchObservedRunningTime="2025-12-02 11:53:00.929545661 +0000 UTC m=+110.668096050"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.946752 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.946790 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.946802 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.946816 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.946828 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:53:00Z","lastTransitionTime":"2025-12-02T11:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.991094 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podStartSLOduration=90.991072268 podStartE2EDuration="1m30.991072268s" podCreationTimestamp="2025-12-02 11:51:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:00.958131906 +0000 UTC m=+110.696682265" watchObservedRunningTime="2025-12-02 11:53:00.991072268 +0000 UTC m=+110.729622617"
Dec 02 11:53:00 crc kubenswrapper[4721]: I1202 11:53:00.991265 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-cflql" podStartSLOduration=89.991257594 podStartE2EDuration="1m29.991257594s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:00.990455708 +0000 UTC m=+110.729006057" watchObservedRunningTime="2025-12-02 11:53:00.991257594 +0000 UTC m=+110.729807963"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.015721 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=90.015701747 podStartE2EDuration="1m30.015701747s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:01.01548412 +0000 UTC m=+110.754034469" watchObservedRunningTime="2025-12-02 11:53:01.015701747 +0000 UTC m=+110.754252096"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.016038 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=27.016030477 podStartE2EDuration="27.016030477s" podCreationTimestamp="2025-12-02 11:52:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:01.00283986 +0000 UTC m=+110.741390219" watchObservedRunningTime="2025-12-02 11:53:01.016030477 +0000 UTC m=+110.754580826"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.037713 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-mwc5f" podStartSLOduration=93.037689742 podStartE2EDuration="1m33.037689742s" podCreationTimestamp="2025-12-02 11:51:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:01.0369712 +0000 UTC m=+110.775521559" watchObservedRunningTime="2025-12-02 11:53:01.037689742 +0000 UTC m=+110.776240091"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.048763 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.048807 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.048818 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.048833 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.048843 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:53:01Z","lastTransitionTime":"2025-12-02T11:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.091602 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-jzz4m" podStartSLOduration=91.091582957 podStartE2EDuration="1m31.091582957s" podCreationTimestamp="2025-12-02 11:51:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:01.069680244 +0000 UTC m=+110.808230613" watchObservedRunningTime="2025-12-02 11:53:01.091582957 +0000 UTC m=+110.830133306"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.119248 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v2k4s" podStartSLOduration=90.119231942 podStartE2EDuration="1m30.119231942s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:01.10272766 +0000 UTC m=+110.841278009" watchObservedRunningTime="2025-12-02 11:53:01.119231942 +0000 UTC m=+110.857782291"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.151212 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.151248 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.151257 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.151269 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.151277 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:53:01Z","lastTransitionTime":"2025-12-02T11:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.253088 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.253292 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.253377 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.253455 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.253523 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:53:01Z","lastTransitionTime":"2025-12-02T11:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.254563 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.254665 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.254735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.254804 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.254867 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T11:53:01Z","lastTransitionTime":"2025-12-02T11:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.290399 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"]
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.290991 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.293133 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.293646 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.294437 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.294573 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.372007 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e83cfdb5-12c1-4304-be2a-1cea3e0f90fb-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-jtsnt\" (UID: \"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.372080 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e83cfdb5-12c1-4304-be2a-1cea3e0f90fb-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-jtsnt\" (UID: \"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.372115 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e83cfdb5-12c1-4304-be2a-1cea3e0f90fb-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-jtsnt\" (UID: \"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.372151 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e83cfdb5-12c1-4304-be2a-1cea3e0f90fb-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-jtsnt\" (UID: \"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.372167 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e83cfdb5-12c1-4304-be2a-1cea3e0f90fb-service-ca\") pod \"cluster-version-operator-5c965bbfc6-jtsnt\" (UID: \"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.474007 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e83cfdb5-12c1-4304-be2a-1cea3e0f90fb-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-jtsnt\" (UID: \"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.474076 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e83cfdb5-12c1-4304-be2a-1cea3e0f90fb-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-jtsnt\" (UID: \"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.474100 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e83cfdb5-12c1-4304-be2a-1cea3e0f90fb-service-ca\") pod \"cluster-version-operator-5c965bbfc6-jtsnt\" (UID: \"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.474147 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e83cfdb5-12c1-4304-be2a-1cea3e0f90fb-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-jtsnt\" (UID: \"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.474185 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e83cfdb5-12c1-4304-be2a-1cea3e0f90fb-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-jtsnt\" (UID: \"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.474512 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e83cfdb5-12c1-4304-be2a-1cea3e0f90fb-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-jtsnt\" (UID: \"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.475010 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e83cfdb5-12c1-4304-be2a-1cea3e0f90fb-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-jtsnt\" (UID: \"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.475407 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e83cfdb5-12c1-4304-be2a-1cea3e0f90fb-service-ca\") pod \"cluster-version-operator-5c965bbfc6-jtsnt\" (UID: \"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.487002 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e83cfdb5-12c1-4304-be2a-1cea3e0f90fb-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-jtsnt\" (UID: \"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.491082 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e83cfdb5-12c1-4304-be2a-1cea3e0f90fb-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-jtsnt\" (UID: \"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.603075 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.839070 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.839115 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.839128 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:01 crc kubenswrapper[4721]: I1202 11:53:01.839326 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:01 crc kubenswrapper[4721]: E1202 11:53:01.839394 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:53:01 crc kubenswrapper[4721]: E1202 11:53:01.839683 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:53:01 crc kubenswrapper[4721]: E1202 11:53:01.839295 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:53:01 crc kubenswrapper[4721]: E1202 11:53:01.839820 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:53:02 crc kubenswrapper[4721]: I1202 11:53:02.384852 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt" event={"ID":"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb","Type":"ContainerStarted","Data":"9ac5ed703e30caa706c50f1e75bc635b58fdba45a33de0e71e97d43aa5fccba0"}
Dec 02 11:53:02 crc kubenswrapper[4721]: I1202 11:53:02.384901 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt" event={"ID":"e83cfdb5-12c1-4304-be2a-1cea3e0f90fb","Type":"ContainerStarted","Data":"fa6f5fea475863b798f0da9ffe99f8186ec0209d33e89d03d059b65f44b43fdb"}
Dec 02 11:53:02 crc kubenswrapper[4721]: I1202 11:53:02.397610 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-jtsnt" podStartSLOduration=92.397593203 podStartE2EDuration="1m32.397593203s" podCreationTimestamp="2025-12-02 11:51:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:02.397263542 +0000 UTC m=+112.135813921" watchObservedRunningTime="2025-12-02 11:53:02.397593203 +0000 UTC m=+112.136143542"
Dec 02 11:53:03 crc kubenswrapper[4721]: I1202 11:53:03.839270 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:03 crc kubenswrapper[4721]: I1202 11:53:03.839311 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:03 crc kubenswrapper[4721]: E1202 11:53:03.839763 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:53:03 crc kubenswrapper[4721]: I1202 11:53:03.839383 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:03 crc kubenswrapper[4721]: I1202 11:53:03.839311 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:03 crc kubenswrapper[4721]: E1202 11:53:03.839902 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:53:03 crc kubenswrapper[4721]: E1202 11:53:03.840047 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:53:03 crc kubenswrapper[4721]: E1202 11:53:03.840088 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:53:04 crc kubenswrapper[4721]: I1202 11:53:04.392945 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzz4m_3becf918-fa56-4f68-9963-b19fb91d4343/kube-multus/1.log"
Dec 02 11:53:04 crc kubenswrapper[4721]: I1202 11:53:04.398799 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzz4m_3becf918-fa56-4f68-9963-b19fb91d4343/kube-multus/0.log"
Dec 02 11:53:04 crc kubenswrapper[4721]: I1202 11:53:04.398850 4721 generic.go:334] "Generic (PLEG): container finished" podID="3becf918-fa56-4f68-9963-b19fb91d4343" containerID="4f55051ff5eedef4b085c57ab10b6545bc8d2abdc70c6e15444e4e0adcd5f659" exitCode=1
Dec 02 11:53:04 crc kubenswrapper[4721]: I1202 11:53:04.398883 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzz4m" event={"ID":"3becf918-fa56-4f68-9963-b19fb91d4343","Type":"ContainerDied","Data":"4f55051ff5eedef4b085c57ab10b6545bc8d2abdc70c6e15444e4e0adcd5f659"}
Dec 02 11:53:04 crc kubenswrapper[4721]: I1202 11:53:04.399120 4721 scope.go:117] "RemoveContainer" containerID="ce3d4e72173f37dd9952913154dc0879e5bdc43d6fa74b876432b4b607f0905e"
Dec 02 11:53:04 crc kubenswrapper[4721]: I1202 11:53:04.399208 4721 scope.go:117] "RemoveContainer" containerID="4f55051ff5eedef4b085c57ab10b6545bc8d2abdc70c6e15444e4e0adcd5f659"
Dec 02 11:53:04 crc kubenswrapper[4721]: E1202 11:53:04.399619 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-jzz4m_openshift-multus(3becf918-fa56-4f68-9963-b19fb91d4343)\"" pod="openshift-multus/multus-jzz4m" podUID="3becf918-fa56-4f68-9963-b19fb91d4343"
Dec 02 11:53:05 crc kubenswrapper[4721]: I1202 11:53:05.402396 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzz4m_3becf918-fa56-4f68-9963-b19fb91d4343/kube-multus/1.log"
Dec 02 11:53:05 crc kubenswrapper[4721]: I1202 11:53:05.839472 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:05 crc kubenswrapper[4721]: I1202 11:53:05.839477 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:05 crc kubenswrapper[4721]: I1202 11:53:05.839504 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:05 crc kubenswrapper[4721]: E1202 11:53:05.839788 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:53:05 crc kubenswrapper[4721]: I1202 11:53:05.839951 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:05 crc kubenswrapper[4721]: E1202 11:53:05.839969 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:53:05 crc kubenswrapper[4721]: E1202 11:53:05.839992 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:53:05 crc kubenswrapper[4721]: E1202 11:53:05.840125 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:53:06 crc kubenswrapper[4721]: I1202 11:53:06.840934 4721 scope.go:117] "RemoveContainer" containerID="57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938"
Dec 02 11:53:06 crc kubenswrapper[4721]: E1202 11:53:06.841244 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-gbxxd_openshift-ovn-kubernetes(e2a903a9-d920-48ed-8450-fa2a4fdb458b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b"
Dec 02 11:53:07 crc kubenswrapper[4721]: I1202 11:53:07.839317 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:07 crc kubenswrapper[4721]: I1202 11:53:07.839434 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:07 crc kubenswrapper[4721]: I1202 11:53:07.839382 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:07 crc kubenswrapper[4721]: I1202 11:53:07.839388 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:07 crc kubenswrapper[4721]: E1202 11:53:07.839532 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:53:07 crc kubenswrapper[4721]: E1202 11:53:07.839699 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:53:07 crc kubenswrapper[4721]: E1202 11:53:07.839793 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:53:07 crc kubenswrapper[4721]: E1202 11:53:07.840092 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:53:09 crc kubenswrapper[4721]: I1202 11:53:09.839090 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:09 crc kubenswrapper[4721]: I1202 11:53:09.839192 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:09 crc kubenswrapper[4721]: I1202 11:53:09.839192 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:09 crc kubenswrapper[4721]: E1202 11:53:09.839395 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:53:09 crc kubenswrapper[4721]: I1202 11:53:09.839440 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:09 crc kubenswrapper[4721]: E1202 11:53:09.839576 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:53:09 crc kubenswrapper[4721]: E1202 11:53:09.839660 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:53:09 crc kubenswrapper[4721]: E1202 11:53:09.839711 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:53:10 crc kubenswrapper[4721]: E1202 11:53:10.812778 4721 kubelet_node_status.go:497] "Node not becoming ready in time after startup"
Dec 02 11:53:10 crc kubenswrapper[4721]: E1202 11:53:10.971764 4721 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 02 11:53:11 crc kubenswrapper[4721]: I1202 11:53:11.839157 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:11 crc kubenswrapper[4721]: I1202 11:53:11.839206 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:11 crc kubenswrapper[4721]: I1202 11:53:11.839238 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:11 crc kubenswrapper[4721]: I1202 11:53:11.839322 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:11 crc kubenswrapper[4721]: E1202 11:53:11.839513 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:53:11 crc kubenswrapper[4721]: E1202 11:53:11.839676 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:53:11 crc kubenswrapper[4721]: E1202 11:53:11.839791 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:53:11 crc kubenswrapper[4721]: E1202 11:53:11.839980 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:53:13 crc kubenswrapper[4721]: I1202 11:53:13.838807 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:13 crc kubenswrapper[4721]: I1202 11:53:13.838842 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:13 crc kubenswrapper[4721]: I1202 11:53:13.838843 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:13 crc kubenswrapper[4721]: I1202 11:53:13.839012 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:13 crc kubenswrapper[4721]: E1202 11:53:13.838968 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:53:13 crc kubenswrapper[4721]: E1202 11:53:13.839217 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:53:13 crc kubenswrapper[4721]: E1202 11:53:13.839280 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:53:13 crc kubenswrapper[4721]: E1202 11:53:13.839386 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:53:15 crc kubenswrapper[4721]: I1202 11:53:15.839791 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:15 crc kubenswrapper[4721]: I1202 11:53:15.839879 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:15 crc kubenswrapper[4721]: I1202 11:53:15.839811 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:15 crc kubenswrapper[4721]: I1202 11:53:15.839811 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:15 crc kubenswrapper[4721]: E1202 11:53:15.840068 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:53:15 crc kubenswrapper[4721]: E1202 11:53:15.840170 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:53:15 crc kubenswrapper[4721]: E1202 11:53:15.840268 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:53:15 crc kubenswrapper[4721]: E1202 11:53:15.840455 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:53:15 crc kubenswrapper[4721]: E1202 11:53:15.973141 4721 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 02 11:53:17 crc kubenswrapper[4721]: I1202 11:53:17.839595 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:17 crc kubenswrapper[4721]: I1202 11:53:17.839651 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:17 crc kubenswrapper[4721]: E1202 11:53:17.839801 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:53:17 crc kubenswrapper[4721]: I1202 11:53:17.839925 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:17 crc kubenswrapper[4721]: I1202 11:53:17.839936 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:17 crc kubenswrapper[4721]: E1202 11:53:17.840078 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:53:17 crc kubenswrapper[4721]: E1202 11:53:17.840162 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:53:17 crc kubenswrapper[4721]: E1202 11:53:17.840281 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:53:19 crc kubenswrapper[4721]: I1202 11:53:19.839620 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:19 crc kubenswrapper[4721]: I1202 11:53:19.839692 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:19 crc kubenswrapper[4721]: E1202 11:53:19.839752 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:53:19 crc kubenswrapper[4721]: E1202 11:53:19.839877 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:53:19 crc kubenswrapper[4721]: I1202 11:53:19.839931 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:19 crc kubenswrapper[4721]: E1202 11:53:19.839976 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:53:19 crc kubenswrapper[4721]: I1202 11:53:19.840296 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:19 crc kubenswrapper[4721]: I1202 11:53:19.840350 4721 scope.go:117] "RemoveContainer" containerID="4f55051ff5eedef4b085c57ab10b6545bc8d2abdc70c6e15444e4e0adcd5f659"
Dec 02 11:53:19 crc kubenswrapper[4721]: E1202 11:53:19.840439 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:53:20 crc kubenswrapper[4721]: I1202 11:53:20.450805 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzz4m_3becf918-fa56-4f68-9963-b19fb91d4343/kube-multus/1.log"
Dec 02 11:53:20 crc kubenswrapper[4721]: I1202 11:53:20.451073 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzz4m" event={"ID":"3becf918-fa56-4f68-9963-b19fb91d4343","Type":"ContainerStarted","Data":"9b74c183f5d95883599f7ec24d387308f5c3735a7602b3d28efbfc1e8a1e4f8a"}
Dec 02 11:53:20 crc kubenswrapper[4721]: E1202 11:53:20.974685 4721 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 02 11:53:21 crc kubenswrapper[4721]: I1202 11:53:21.839258 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:21 crc kubenswrapper[4721]: I1202 11:53:21.839410 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:21 crc kubenswrapper[4721]: I1202 11:53:21.839473 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:21 crc kubenswrapper[4721]: I1202 11:53:21.839490 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:21 crc kubenswrapper[4721]: E1202 11:53:21.839581 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:53:21 crc kubenswrapper[4721]: E1202 11:53:21.839727 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:53:21 crc kubenswrapper[4721]: I1202 11:53:21.839763 4721 scope.go:117] "RemoveContainer" containerID="57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938"
Dec 02 11:53:21 crc kubenswrapper[4721]: E1202 11:53:21.839905 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:53:21 crc kubenswrapper[4721]: E1202 11:53:21.840028 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:53:22 crc kubenswrapper[4721]: I1202 11:53:22.457733 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovnkube-controller/3.log"
Dec 02 11:53:22 crc kubenswrapper[4721]: I1202 11:53:22.460068 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerStarted","Data":"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282"}
Dec 02 11:53:22 crc kubenswrapper[4721]: I1202 11:53:22.460505 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd"
Dec 02 11:53:22 crc kubenswrapper[4721]: I1202 11:53:22.487956 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podStartSLOduration=111.487941452 podStartE2EDuration="1m51.487941452s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:22.487179368 +0000 UTC m=+132.225729727" watchObservedRunningTime="2025-12-02 11:53:22.487941452 +0000 UTC m=+132.226491801"
Dec 02 11:53:22 crc kubenswrapper[4721]: I1202 11:53:22.656480 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-qf9jp"]
Dec 02 11:53:22 crc kubenswrapper[4721]: I1202 11:53:22.656611 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:22 crc kubenswrapper[4721]: E1202 11:53:22.656710 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:53:23 crc kubenswrapper[4721]: I1202 11:53:23.839274 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:23 crc kubenswrapper[4721]: I1202 11:53:23.839314 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:23 crc kubenswrapper[4721]: I1202 11:53:23.839278 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:23 crc kubenswrapper[4721]: E1202 11:53:23.839431 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:53:23 crc kubenswrapper[4721]: E1202 11:53:23.839478 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:53:23 crc kubenswrapper[4721]: E1202 11:53:23.839533 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:53:24 crc kubenswrapper[4721]: I1202 11:53:24.838721 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:24 crc kubenswrapper[4721]: E1202 11:53:24.838851 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qf9jp" podUID="c1b39d16-c46b-40dc-a2a6-600f96eb93f7"
Dec 02 11:53:25 crc kubenswrapper[4721]: I1202 11:53:25.839130 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:25 crc kubenswrapper[4721]: I1202 11:53:25.839176 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:25 crc kubenswrapper[4721]: I1202 11:53:25.839178 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:25 crc kubenswrapper[4721]: E1202 11:53:25.839308 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 11:53:25 crc kubenswrapper[4721]: E1202 11:53:25.839469 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 11:53:25 crc kubenswrapper[4721]: E1202 11:53:25.839608 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 11:53:26 crc kubenswrapper[4721]: I1202 11:53:26.838784 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:26 crc kubenswrapper[4721]: I1202 11:53:26.840938 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Dec 02 11:53:26 crc kubenswrapper[4721]: I1202 11:53:26.844557 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 02 11:53:27 crc kubenswrapper[4721]: I1202 11:53:27.839299 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:27 crc kubenswrapper[4721]: I1202 11:53:27.839416 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:27 crc kubenswrapper[4721]: I1202 11:53:27.839440 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:27 crc kubenswrapper[4721]: I1202 11:53:27.841398 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 02 11:53:27 crc kubenswrapper[4721]: I1202 11:53:27.841595 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 02 11:53:27 crc kubenswrapper[4721]: I1202 11:53:27.842232 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 02 11:53:27 crc kubenswrapper[4721]: I1202 11:53:27.845095 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 02 11:53:30 crc kubenswrapper[4721]: I1202 11:53:30.051561 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd"
Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.841567 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.886117 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zjwgh"]
Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.886757 4721 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.887217 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fg2ch"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.887592 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.888175 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.888677 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.889306 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.889573 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.889620 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.889623 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.890477 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-vhhcx"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.890844 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.891011 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.891787 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.892226 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.893093 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.893464 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.893869 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-rv854"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.894231 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-rv854" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.894915 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-tl5t9"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.895253 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.895684 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.896252 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.897297 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.897784 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.898263 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.898472 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-brktp"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.898677 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.898802 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-brktp" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.902344 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.902569 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.904373 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.904532 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.904658 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.904759 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.904897 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.905050 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.905108 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.905161 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.905052 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.906105 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.906157 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.906807 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-5cfgp"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.907290 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.908248 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.908834 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.909806 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.909941 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-qx7nr"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.910052 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.923445 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-qx7nr" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.923635 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.923881 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.923929 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.937708 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.938120 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.938240 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.938337 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.938760 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.938895 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.939123 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.939281 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.939387 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.939552 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.939789 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 02 11:53:31 crc 
kubenswrapper[4721]: I1202 11:53:31.939865 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.939794 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.940068 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.940055 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.940240 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.941114 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-wtlwb"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.941694 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.941855 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.942010 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.942105 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.942143 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.942234 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tkr2s"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.942250 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.942420 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.939335 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.942477 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.939564 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.942691 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.942764 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.942609 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.943026 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.943119 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.943231 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.943233 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.948928 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.949209 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.949337 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.949393 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.949676 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.949839 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.949920 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.950054 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.950186 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.950405 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.950526 4721 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console"/"console-serving-cert" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.950675 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.950788 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.950929 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.950940 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.951162 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.951273 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.951420 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.951513 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.950693 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.951039 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.951850 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.951941 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.951448 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.951484 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.952280 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.952400 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.951843 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.952509 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.952581 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.952527 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.952199 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.952988 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.953612 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.953854 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.954092 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.954442 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.954872 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-clflg"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.955980 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.956369 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.956851 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.958552 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zjwgh"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.960024 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.967895 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xf8pm"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.968754 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wb9r7"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.969209 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.969510 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-drfrd"] Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.969577 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xf8pm" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.970204 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-drfrd" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.970952 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.971013 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.971170 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.971186 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.971282 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.972236 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.972485 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.971285 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.987662 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.988449 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.990227 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 02 11:53:31 crc kubenswrapper[4721]: I1202 11:53:31.995690 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gtb8k"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.011490 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.011966 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.012471 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.012627 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.012737 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013405 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013457 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a26573b3-3899-4b81-ba01-fabea24142cc-metrics-certs\") pod \"router-default-5444994796-wtlwb\" (UID: \"a26573b3-3899-4b81-ba01-fabea24142cc\") " pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013477 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33a6b5de-5ce1-4e23-b13d-feda684e25e0-serving-cert\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013508 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82qh8\" (UniqueName: \"kubernetes.io/projected/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-kube-api-access-82qh8\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013538 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b464a77f-4bf7-4719-98c2-d07e5bde858a-config\") pod \"machine-api-operator-5694c8668f-zjwgh\" (UID: \"b464a77f-4bf7-4719-98c2-d07e5bde858a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013558 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a275db4b-1993-4d42-b623-2bc621a59042-auth-proxy-config\") pod \"machine-approver-56656f9798-pf5mn\" (UID: \"a275db4b-1993-4d42-b623-2bc621a59042\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013584 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9556d\" (UniqueName: \"kubernetes.io/projected/a275db4b-1993-4d42-b623-2bc621a59042-kube-api-access-9556d\") pod \"machine-approver-56656f9798-pf5mn\" (UID: \"a275db4b-1993-4d42-b623-2bc621a59042\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013645 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9g2v\" (UniqueName: \"kubernetes.io/projected/bc778281-fc4f-4592-a805-e03aac7643b1-kube-api-access-n9g2v\") pod \"openshift-apiserver-operator-796bbdcf4f-57995\" (UID: \"bc778281-fc4f-4592-a805-e03aac7643b1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013673 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjt7h\" (UniqueName: \"kubernetes.io/projected/b464a77f-4bf7-4719-98c2-d07e5bde858a-kube-api-access-bjt7h\") pod \"machine-api-operator-5694c8668f-zjwgh\" (UID: \"b464a77f-4bf7-4719-98c2-d07e5bde858a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013693 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-console-config\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013714 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a26573b3-3899-4b81-ba01-fabea24142cc-service-ca-bundle\") pod \"router-default-5444994796-wtlwb\" (UID: \"a26573b3-3899-4b81-ba01-fabea24142cc\") " pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013738 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f706843-68b8-4598-9482-9c1361f60a63-serving-cert\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013758 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b464a77f-4bf7-4719-98c2-d07e5bde858a-images\") pod \"machine-api-operator-5694c8668f-zjwgh\" (UID: \"b464a77f-4bf7-4719-98c2-d07e5bde858a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013775 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/984ee29c-596a-48e8-80a4-cf8fda878e7f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8tgqm\" (UID: \"984ee29c-596a-48e8-80a4-cf8fda878e7f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013792 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/984ee29c-596a-48e8-80a4-cf8fda878e7f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8tgqm\" (UID: \"984ee29c-596a-48e8-80a4-cf8fda878e7f\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013807 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a275db4b-1993-4d42-b623-2bc621a59042-config\") pod \"machine-approver-56656f9798-pf5mn\" (UID: \"a275db4b-1993-4d42-b623-2bc621a59042\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013823 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc778281-fc4f-4592-a805-e03aac7643b1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-57995\" (UID: \"bc778281-fc4f-4592-a805-e03aac7643b1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013839 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac4ba204-8204-441b-9e16-5ec66a277675-config\") pod \"authentication-operator-69f744f599-vhhcx\" (UID: \"ac4ba204-8204-441b-9e16-5ec66a277675\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013856 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/995ba10c-c212-4afd-a95e-a03cbac4d08e-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v2gnh\" (UID: \"995ba10c-c212-4afd-a95e-a03cbac4d08e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013872 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0f706843-68b8-4598-9482-9c1361f60a63-etcd-client\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013899 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/995ba10c-c212-4afd-a95e-a03cbac4d08e-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v2gnh\" (UID: \"995ba10c-c212-4afd-a95e-a03cbac4d08e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013913 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a26573b3-3899-4b81-ba01-fabea24142cc-stats-auth\") pod \"router-default-5444994796-wtlwb\" (UID: \"a26573b3-3899-4b81-ba01-fabea24142cc\") " pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013929 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vfb4\" (UniqueName: \"kubernetes.io/projected/984ee29c-596a-48e8-80a4-cf8fda878e7f-kube-api-access-9vfb4\") pod \"cluster-image-registry-operator-dc59b4c8b-8tgqm\" (UID: \"984ee29c-596a-48e8-80a4-cf8fda878e7f\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013943 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-trusted-ca-bundle\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.013976 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/33a6b5de-5ce1-4e23-b13d-feda684e25e0-etcd-service-ca\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014005 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa569267-90f6-47ac-8cf0-467ba17c8e06-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xwtjj\" (UID: \"aa569267-90f6-47ac-8cf0-467ba17c8e06\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014027 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7z8s\" (UniqueName: \"kubernetes.io/projected/78c789c6-4ac0-4a7b-8cb6-b072909ccf03-kube-api-access-m7z8s\") pod \"downloads-7954f5f757-qx7nr\" (UID: \"78c789c6-4ac0-4a7b-8cb6-b072909ccf03\") " pod="openshift-console/downloads-7954f5f757-qx7nr" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014047 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-client-ca\") pod \"controller-manager-879f6c89f-fg2ch\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014068 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac4ba204-8204-441b-9e16-5ec66a277675-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-vhhcx\" (UID: \"ac4ba204-8204-441b-9e16-5ec66a277675\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014087 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0f706843-68b8-4598-9482-9c1361f60a63-audit-policies\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014106 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac4ba204-8204-441b-9e16-5ec66a277675-serving-cert\") pod \"authentication-operator-69f744f599-vhhcx\" (UID: \"ac4ba204-8204-441b-9e16-5ec66a277675\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014125 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc2hd\" (UniqueName: \"kubernetes.io/projected/213e4819-aa67-42ad-954c-5a461b370407-kube-api-access-rc2hd\") pod \"dns-operator-744455d44c-brktp\" (UID: \"213e4819-aa67-42ad-954c-5a461b370407\") " pod="openshift-dns-operator/dns-operator-744455d44c-brktp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014146 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-config\") pod \"controller-manager-879f6c89f-fg2ch\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014203 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f4e7fae-d9a4-4f20-8658-4544860eaced-client-ca\") pod \"route-controller-manager-6576b87f9c-kfmnl\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014222 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f4e7fae-d9a4-4f20-8658-4544860eaced-config\") pod \"route-controller-manager-6576b87f9c-kfmnl\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014268 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0f706843-68b8-4598-9482-9c1361f60a63-encryption-config\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014301 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0f706843-68b8-4598-9482-9c1361f60a63-audit-dir\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014331 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0863475f-c2fe-4b3c-abca-eaecb1e26561-serving-cert\") pod \"openshift-config-operator-7777fb866f-bj7cn\" (UID: \"0863475f-c2fe-4b3c-abca-eaecb1e26561\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014359 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dpkw\" (UniqueName: \"kubernetes.io/projected/0863475f-c2fe-4b3c-abca-eaecb1e26561-kube-api-access-8dpkw\") pod \"openshift-config-operator-7777fb866f-bj7cn\" (UID: \"0863475f-c2fe-4b3c-abca-eaecb1e26561\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014376 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz9c5\" (UniqueName: \"kubernetes.io/projected/c02678af-0ebc-4689-ad5c-1300cb95439e-kube-api-access-bz9c5\") pod \"controller-manager-879f6c89f-fg2ch\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014397 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb2mc\" (UniqueName: \"kubernetes.io/projected/7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99-kube-api-access-xb2mc\") pod \"console-operator-58897d9998-rv854\" (UID: \"7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99\") " pod="openshift-console-operator/console-operator-58897d9998-rv854" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014418 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/0863475f-c2fe-4b3c-abca-eaecb1e26561-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bj7cn\" (UID: \"0863475f-c2fe-4b3c-abca-eaecb1e26561\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014436 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33a6b5de-5ce1-4e23-b13d-feda684e25e0-config\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014453 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gv94b\" (UniqueName: \"kubernetes.io/projected/a26573b3-3899-4b81-ba01-fabea24142cc-kube-api-access-gv94b\") pod \"router-default-5444994796-wtlwb\" (UID: \"a26573b3-3899-4b81-ba01-fabea24142cc\") " pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014466 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0f706843-68b8-4598-9482-9c1361f60a63-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014488 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99-serving-cert\") pod \"console-operator-58897d9998-rv854\" (UID: \"7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99\") " pod="openshift-console-operator/console-operator-58897d9998-rv854" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014502 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/33a6b5de-5ce1-4e23-b13d-feda684e25e0-etcd-client\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" 
Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014516 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/984ee29c-596a-48e8-80a4-cf8fda878e7f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8tgqm\" (UID: \"984ee29c-596a-48e8-80a4-cf8fda878e7f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014532 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac4ba204-8204-441b-9e16-5ec66a277675-service-ca-bundle\") pod \"authentication-operator-69f744f599-vhhcx\" (UID: \"ac4ba204-8204-441b-9e16-5ec66a277675\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014549 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/33a6b5de-5ce1-4e23-b13d-feda684e25e0-etcd-ca\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014563 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zq4rh\" (UniqueName: \"kubernetes.io/projected/33a6b5de-5ce1-4e23-b13d-feda684e25e0-kube-api-access-zq4rh\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014604 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b464a77f-4bf7-4719-98c2-d07e5bde858a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zjwgh\" (UID: \"b464a77f-4bf7-4719-98c2-d07e5bde858a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014628 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a26573b3-3899-4b81-ba01-fabea24142cc-default-certificate\") pod \"router-default-5444994796-wtlwb\" (UID: \"a26573b3-3899-4b81-ba01-fabea24142cc\") " pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014644 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-console-serving-cert\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014660 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/a275db4b-1993-4d42-b623-2bc621a59042-machine-approver-tls\") pod \"machine-approver-56656f9798-pf5mn\" (UID: \"a275db4b-1993-4d42-b623-2bc621a59042\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" Dec 02 11:53:32 crc kubenswrapper[4721]: 
I1202 11:53:32.014674 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc778281-fc4f-4592-a805-e03aac7643b1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-57995\" (UID: \"bc778281-fc4f-4592-a805-e03aac7643b1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014689 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-console-oauth-config\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014704 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-fg2ch\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014729 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f4e7fae-d9a4-4f20-8658-4544860eaced-serving-cert\") pod \"route-controller-manager-6576b87f9c-kfmnl\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014744 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/213e4819-aa67-42ad-954c-5a461b370407-metrics-tls\") pod \"dns-operator-744455d44c-brktp\" (UID: \"213e4819-aa67-42ad-954c-5a461b370407\") " pod="openshift-dns-operator/dns-operator-744455d44c-brktp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014761 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99-trusted-ca\") pod \"console-operator-58897d9998-rv854\" (UID: \"7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99\") " pod="openshift-console-operator/console-operator-58897d9998-rv854" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014777 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qvjx\" (UniqueName: \"kubernetes.io/projected/aa569267-90f6-47ac-8cf0-467ba17c8e06-kube-api-access-6qvjx\") pod \"openshift-controller-manager-operator-756b6f6bc6-xwtjj\" (UID: \"aa569267-90f6-47ac-8cf0-467ba17c8e06\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014800 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0f706843-68b8-4598-9482-9c1361f60a63-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014815 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jsf4\" (UniqueName: \"kubernetes.io/projected/0f706843-68b8-4598-9482-9c1361f60a63-kube-api-access-8jsf4\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014834 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/995ba10c-c212-4afd-a95e-a03cbac4d08e-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v2gnh\" (UID: \"995ba10c-c212-4afd-a95e-a03cbac4d08e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014851 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99-config\") pod \"console-operator-58897d9998-rv854\" (UID: \"7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99\") " pod="openshift-console-operator/console-operator-58897d9998-rv854" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014869 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c02678af-0ebc-4689-ad5c-1300cb95439e-serving-cert\") pod \"controller-manager-879f6c89f-fg2ch\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014946 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnsb9\" (UniqueName: \"kubernetes.io/projected/ac4ba204-8204-441b-9e16-5ec66a277675-kube-api-access-tnsb9\") pod \"authentication-operator-69f744f599-vhhcx\" (UID: \"ac4ba204-8204-441b-9e16-5ec66a277675\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.014972 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-oauth-serving-cert\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.015007 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa569267-90f6-47ac-8cf0-467ba17c8e06-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xwtjj\" (UID: \"aa569267-90f6-47ac-8cf0-467ba17c8e06\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.015027 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-service-ca\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.015049 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m96xx\" (UniqueName: \"kubernetes.io/projected/1f4e7fae-d9a4-4f20-8658-4544860eaced-kube-api-access-m96xx\") pod \"route-controller-manager-6576b87f9c-kfmnl\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.015290 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.015801 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.017977 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-66mxw"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.018768 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66mxw" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.019472 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.023218 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.023683 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.023692 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.024483 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.024934 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-n2z7b"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.025552 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-n2z7b" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.026664 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-q8vvn"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.027212 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.027622 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.028261 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.028608 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-bl9q7"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.029076 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-bl9q7" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.030011 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.030425 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.032308 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.032336 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.032909 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.033250 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.034834 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.035874 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.039316 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fg2ch"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.039369 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-vhhcx"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.039389 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.039954 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.042600 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.042783 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-mkdl7"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.043486 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.043559 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-mkdl7" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.054199 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-5cfgp"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.057277 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.058555 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.058723 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gtb8k"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.060278 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-tl5t9"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.066798 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xf8pm"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.067590 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.068399 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.068571 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-qx7nr"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.069945 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-rv854"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.071638 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.076430 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.077390 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-bl9q7"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.079508 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-66mxw"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.079730 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tkr2s"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.081548 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-n2z7b"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.084054 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.085904 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.087069 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.091764 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-brktp"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.094641 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-clflg"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.095208 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.105095 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.110712 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.114252 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116149 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-config\") pod \"controller-manager-879f6c89f-fg2ch\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116181 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f4e7fae-d9a4-4f20-8658-4544860eaced-client-ca\") pod \"route-controller-manager-6576b87f9c-kfmnl\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116200 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac4ba204-8204-441b-9e16-5ec66a277675-serving-cert\") pod \"authentication-operator-69f744f599-vhhcx\" (UID: \"ac4ba204-8204-441b-9e16-5ec66a277675\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116216 4721 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc2hd\" (UniqueName: \"kubernetes.io/projected/213e4819-aa67-42ad-954c-5a461b370407-kube-api-access-rc2hd\") pod \"dns-operator-744455d44c-brktp\" (UID: \"213e4819-aa67-42ad-954c-5a461b370407\") " pod="openshift-dns-operator/dns-operator-744455d44c-brktp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116233 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f4e7fae-d9a4-4f20-8658-4544860eaced-config\") pod \"route-controller-manager-6576b87f9c-kfmnl\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116248 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0f706843-68b8-4598-9482-9c1361f60a63-encryption-config\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116261 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0f706843-68b8-4598-9482-9c1361f60a63-audit-dir\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116285 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0863475f-c2fe-4b3c-abca-eaecb1e26561-serving-cert\") pod \"openshift-config-operator-7777fb866f-bj7cn\" (UID: \"0863475f-c2fe-4b3c-abca-eaecb1e26561\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116308 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dpkw\" (UniqueName: \"kubernetes.io/projected/0863475f-c2fe-4b3c-abca-eaecb1e26561-kube-api-access-8dpkw\") pod \"openshift-config-operator-7777fb866f-bj7cn\" (UID: \"0863475f-c2fe-4b3c-abca-eaecb1e26561\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116324 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz9c5\" (UniqueName: \"kubernetes.io/projected/c02678af-0ebc-4689-ad5c-1300cb95439e-kube-api-access-bz9c5\") pod \"controller-manager-879f6c89f-fg2ch\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116355 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33a6b5de-5ce1-4e23-b13d-feda684e25e0-config\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116371 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb2mc\" (UniqueName: \"kubernetes.io/projected/7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99-kube-api-access-xb2mc\") pod 
\"console-operator-58897d9998-rv854\" (UID: \"7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99\") " pod="openshift-console-operator/console-operator-58897d9998-rv854" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116388 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/0863475f-c2fe-4b3c-abca-eaecb1e26561-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bj7cn\" (UID: \"0863475f-c2fe-4b3c-abca-eaecb1e26561\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116410 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gv94b\" (UniqueName: \"kubernetes.io/projected/a26573b3-3899-4b81-ba01-fabea24142cc-kube-api-access-gv94b\") pod \"router-default-5444994796-wtlwb\" (UID: \"a26573b3-3899-4b81-ba01-fabea24142cc\") " pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116431 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0f706843-68b8-4598-9482-9c1361f60a63-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116451 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99-serving-cert\") pod \"console-operator-58897d9998-rv854\" (UID: \"7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99\") " pod="openshift-console-operator/console-operator-58897d9998-rv854" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116468 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/33a6b5de-5ce1-4e23-b13d-feda684e25e0-etcd-client\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116482 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac4ba204-8204-441b-9e16-5ec66a277675-service-ca-bundle\") pod \"authentication-operator-69f744f599-vhhcx\" (UID: \"ac4ba204-8204-441b-9e16-5ec66a277675\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116496 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/33a6b5de-5ce1-4e23-b13d-feda684e25e0-etcd-ca\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116514 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zq4rh\" (UniqueName: \"kubernetes.io/projected/33a6b5de-5ce1-4e23-b13d-feda684e25e0-kube-api-access-zq4rh\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116544 4721 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/984ee29c-596a-48e8-80a4-cf8fda878e7f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8tgqm\" (UID: \"984ee29c-596a-48e8-80a4-cf8fda878e7f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116560 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b464a77f-4bf7-4719-98c2-d07e5bde858a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zjwgh\" (UID: \"b464a77f-4bf7-4719-98c2-d07e5bde858a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116575 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a26573b3-3899-4b81-ba01-fabea24142cc-default-certificate\") pod \"router-default-5444994796-wtlwb\" (UID: \"a26573b3-3899-4b81-ba01-fabea24142cc\") " pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116591 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-console-serving-cert\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116613 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/a275db4b-1993-4d42-b623-2bc621a59042-machine-approver-tls\") pod \"machine-approver-56656f9798-pf5mn\" (UID: \"a275db4b-1993-4d42-b623-2bc621a59042\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116629 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc778281-fc4f-4592-a805-e03aac7643b1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-57995\" (UID: \"bc778281-fc4f-4592-a805-e03aac7643b1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116643 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-fg2ch\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116656 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-console-oauth-config\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116672 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f4e7fae-d9a4-4f20-8658-4544860eaced-serving-cert\") pod 
\"route-controller-manager-6576b87f9c-kfmnl\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116688 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/213e4819-aa67-42ad-954c-5a461b370407-metrics-tls\") pod \"dns-operator-744455d44c-brktp\" (UID: \"213e4819-aa67-42ad-954c-5a461b370407\") " pod="openshift-dns-operator/dns-operator-744455d44c-brktp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116704 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99-trusted-ca\") pod \"console-operator-58897d9998-rv854\" (UID: \"7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99\") " pod="openshift-console-operator/console-operator-58897d9998-rv854" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116720 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qvjx\" (UniqueName: \"kubernetes.io/projected/aa569267-90f6-47ac-8cf0-467ba17c8e06-kube-api-access-6qvjx\") pod \"openshift-controller-manager-operator-756b6f6bc6-xwtjj\" (UID: \"aa569267-90f6-47ac-8cf0-467ba17c8e06\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116744 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0f706843-68b8-4598-9482-9c1361f60a63-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116758 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jsf4\" (UniqueName: \"kubernetes.io/projected/0f706843-68b8-4598-9482-9c1361f60a63-kube-api-access-8jsf4\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116775 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/995ba10c-c212-4afd-a95e-a03cbac4d08e-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v2gnh\" (UID: \"995ba10c-c212-4afd-a95e-a03cbac4d08e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116790 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99-config\") pod \"console-operator-58897d9998-rv854\" (UID: \"7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99\") " pod="openshift-console-operator/console-operator-58897d9998-rv854" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116805 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c02678af-0ebc-4689-ad5c-1300cb95439e-serving-cert\") pod \"controller-manager-879f6c89f-fg2ch\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116821 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnsb9\" (UniqueName: \"kubernetes.io/projected/ac4ba204-8204-441b-9e16-5ec66a277675-kube-api-access-tnsb9\") pod \"authentication-operator-69f744f599-vhhcx\" (UID: \"ac4ba204-8204-441b-9e16-5ec66a277675\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116838 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-oauth-serving-cert\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116853 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa569267-90f6-47ac-8cf0-467ba17c8e06-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xwtjj\" (UID: \"aa569267-90f6-47ac-8cf0-467ba17c8e06\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116868 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-service-ca\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116884 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m96xx\" (UniqueName: \"kubernetes.io/projected/1f4e7fae-d9a4-4f20-8658-4544860eaced-kube-api-access-m96xx\") pod \"route-controller-manager-6576b87f9c-kfmnl\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116901 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a26573b3-3899-4b81-ba01-fabea24142cc-metrics-certs\") pod \"router-default-5444994796-wtlwb\" (UID: \"a26573b3-3899-4b81-ba01-fabea24142cc\") " pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116915 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33a6b5de-5ce1-4e23-b13d-feda684e25e0-serving-cert\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116930 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82qh8\" (UniqueName: \"kubernetes.io/projected/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-kube-api-access-82qh8\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116952 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/b464a77f-4bf7-4719-98c2-d07e5bde858a-config\") pod \"machine-api-operator-5694c8668f-zjwgh\" (UID: \"b464a77f-4bf7-4719-98c2-d07e5bde858a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116968 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a275db4b-1993-4d42-b623-2bc621a59042-auth-proxy-config\") pod \"machine-approver-56656f9798-pf5mn\" (UID: \"a275db4b-1993-4d42-b623-2bc621a59042\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.116982 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9556d\" (UniqueName: \"kubernetes.io/projected/a275db4b-1993-4d42-b623-2bc621a59042-kube-api-access-9556d\") pod \"machine-approver-56656f9798-pf5mn\" (UID: \"a275db4b-1993-4d42-b623-2bc621a59042\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117062 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9g2v\" (UniqueName: \"kubernetes.io/projected/bc778281-fc4f-4592-a805-e03aac7643b1-kube-api-access-n9g2v\") pod \"openshift-apiserver-operator-796bbdcf4f-57995\" (UID: \"bc778281-fc4f-4592-a805-e03aac7643b1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117085 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a26573b3-3899-4b81-ba01-fabea24142cc-service-ca-bundle\") pod \"router-default-5444994796-wtlwb\" (UID: \"a26573b3-3899-4b81-ba01-fabea24142cc\") " pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117106 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f706843-68b8-4598-9482-9c1361f60a63-serving-cert\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117126 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b464a77f-4bf7-4719-98c2-d07e5bde858a-images\") pod \"machine-api-operator-5694c8668f-zjwgh\" (UID: \"b464a77f-4bf7-4719-98c2-d07e5bde858a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117149 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjt7h\" (UniqueName: \"kubernetes.io/projected/b464a77f-4bf7-4719-98c2-d07e5bde858a-kube-api-access-bjt7h\") pod \"machine-api-operator-5694c8668f-zjwgh\" (UID: \"b464a77f-4bf7-4719-98c2-d07e5bde858a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117171 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-console-config\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " 
pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117193 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc778281-fc4f-4592-a805-e03aac7643b1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-57995\" (UID: \"bc778281-fc4f-4592-a805-e03aac7643b1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117215 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac4ba204-8204-441b-9e16-5ec66a277675-config\") pod \"authentication-operator-69f744f599-vhhcx\" (UID: \"ac4ba204-8204-441b-9e16-5ec66a277675\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117237 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/995ba10c-c212-4afd-a95e-a03cbac4d08e-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v2gnh\" (UID: \"995ba10c-c212-4afd-a95e-a03cbac4d08e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117259 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0f706843-68b8-4598-9482-9c1361f60a63-etcd-client\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117278 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/984ee29c-596a-48e8-80a4-cf8fda878e7f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8tgqm\" (UID: \"984ee29c-596a-48e8-80a4-cf8fda878e7f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117302 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/984ee29c-596a-48e8-80a4-cf8fda878e7f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8tgqm\" (UID: \"984ee29c-596a-48e8-80a4-cf8fda878e7f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117325 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a275db4b-1993-4d42-b623-2bc621a59042-config\") pod \"machine-approver-56656f9798-pf5mn\" (UID: \"a275db4b-1993-4d42-b623-2bc621a59042\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117363 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vfb4\" (UniqueName: \"kubernetes.io/projected/984ee29c-596a-48e8-80a4-cf8fda878e7f-kube-api-access-9vfb4\") pod \"cluster-image-registry-operator-dc59b4c8b-8tgqm\" (UID: \"984ee29c-596a-48e8-80a4-cf8fda878e7f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" Dec 02 11:53:32 crc 
kubenswrapper[4721]: I1202 11:53:32.117384 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-trusted-ca-bundle\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117410 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/995ba10c-c212-4afd-a95e-a03cbac4d08e-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v2gnh\" (UID: \"995ba10c-c212-4afd-a95e-a03cbac4d08e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117436 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a26573b3-3899-4b81-ba01-fabea24142cc-stats-auth\") pod \"router-default-5444994796-wtlwb\" (UID: \"a26573b3-3899-4b81-ba01-fabea24142cc\") " pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117461 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7z8s\" (UniqueName: \"kubernetes.io/projected/78c789c6-4ac0-4a7b-8cb6-b072909ccf03-kube-api-access-m7z8s\") pod \"downloads-7954f5f757-qx7nr\" (UID: \"78c789c6-4ac0-4a7b-8cb6-b072909ccf03\") " pod="openshift-console/downloads-7954f5f757-qx7nr" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117484 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-client-ca\") pod \"controller-manager-879f6c89f-fg2ch\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117511 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/33a6b5de-5ce1-4e23-b13d-feda684e25e0-etcd-service-ca\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117535 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa569267-90f6-47ac-8cf0-467ba17c8e06-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xwtjj\" (UID: \"aa569267-90f6-47ac-8cf0-467ba17c8e06\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117560 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac4ba204-8204-441b-9e16-5ec66a277675-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-vhhcx\" (UID: \"ac4ba204-8204-441b-9e16-5ec66a277675\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.117578 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/0f706843-68b8-4598-9482-9c1361f60a63-audit-policies\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.118189 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0f706843-68b8-4598-9482-9c1361f60a63-audit-policies\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.119916 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99-config\") pod \"console-operator-58897d9998-rv854\" (UID: \"7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99\") " pod="openshift-console-operator/console-operator-58897d9998-rv854" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.119983 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-config\") pod \"controller-manager-879f6c89f-fg2ch\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.120111 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac4ba204-8204-441b-9e16-5ec66a277675-service-ca-bundle\") pod \"authentication-operator-69f744f599-vhhcx\" (UID: \"ac4ba204-8204-441b-9e16-5ec66a277675\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.120610 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-console-config\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.120809 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f4e7fae-d9a4-4f20-8658-4544860eaced-client-ca\") pod \"route-controller-manager-6576b87f9c-kfmnl\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.120969 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/33a6b5de-5ce1-4e23-b13d-feda684e25e0-etcd-ca\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.122750 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.122811 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.123362 4721 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/984ee29c-596a-48e8-80a4-cf8fda878e7f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8tgqm\" (UID: \"984ee29c-596a-48e8-80a4-cf8fda878e7f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.123761 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-mkdl7"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.124552 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-oauth-serving-cert\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.125145 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa569267-90f6-47ac-8cf0-467ba17c8e06-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xwtjj\" (UID: \"aa569267-90f6-47ac-8cf0-467ba17c8e06\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.125809 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c02678af-0ebc-4689-ad5c-1300cb95439e-serving-cert\") pod \"controller-manager-879f6c89f-fg2ch\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.126057 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-console-oauth-config\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.126215 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.126409 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-service-ca\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.126943 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc778281-fc4f-4592-a805-e03aac7643b1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-57995\" (UID: \"bc778281-fc4f-4592-a805-e03aac7643b1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.127223 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a275db4b-1993-4d42-b623-2bc621a59042-auth-proxy-config\") pod \"machine-approver-56656f9798-pf5mn\" (UID: \"a275db4b-1993-4d42-b623-2bc621a59042\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" Dec 02 11:53:32 crc 
kubenswrapper[4721]: I1202 11:53:32.127287 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b464a77f-4bf7-4719-98c2-d07e5bde858a-config\") pod \"machine-api-operator-5694c8668f-zjwgh\" (UID: \"b464a77f-4bf7-4719-98c2-d07e5bde858a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.127410 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b464a77f-4bf7-4719-98c2-d07e5bde858a-images\") pod \"machine-api-operator-5694c8668f-zjwgh\" (UID: \"b464a77f-4bf7-4719-98c2-d07e5bde858a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.127420 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f4e7fae-d9a4-4f20-8658-4544860eaced-serving-cert\") pod \"route-controller-manager-6576b87f9c-kfmnl\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.127512 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0f706843-68b8-4598-9482-9c1361f60a63-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.128210 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac4ba204-8204-441b-9e16-5ec66a277675-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-vhhcx\" (UID: \"ac4ba204-8204-441b-9e16-5ec66a277675\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.128246 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a26573b3-3899-4b81-ba01-fabea24142cc-service-ca-bundle\") pod \"router-default-5444994796-wtlwb\" (UID: \"a26573b3-3899-4b81-ba01-fabea24142cc\") " pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.128641 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/33a6b5de-5ce1-4e23-b13d-feda684e25e0-etcd-service-ca\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.128707 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/a275db4b-1993-4d42-b623-2bc621a59042-machine-approver-tls\") pod \"machine-approver-56656f9798-pf5mn\" (UID: \"a275db4b-1993-4d42-b623-2bc621a59042\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.129289 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33a6b5de-5ce1-4e23-b13d-feda684e25e0-config\") pod \"etcd-operator-b45778765-5cfgp\" (UID: 
\"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.129382 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/995ba10c-c212-4afd-a95e-a03cbac4d08e-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v2gnh\" (UID: \"995ba10c-c212-4afd-a95e-a03cbac4d08e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.129842 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-console-serving-cert\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.130004 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99-serving-cert\") pod \"console-operator-58897d9998-rv854\" (UID: \"7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99\") " pod="openshift-console-operator/console-operator-58897d9998-rv854" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.130151 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.130330 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-vhp6r"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.130171 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-fg2ch\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.130454 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f4e7fae-d9a4-4f20-8658-4544860eaced-config\") pod \"route-controller-manager-6576b87f9c-kfmnl\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.130621 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc778281-fc4f-4592-a805-e03aac7643b1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-57995\" (UID: \"bc778281-fc4f-4592-a805-e03aac7643b1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.130771 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/0863475f-c2fe-4b3c-abca-eaecb1e26561-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bj7cn\" (UID: \"0863475f-c2fe-4b3c-abca-eaecb1e26561\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.131079 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa569267-90f6-47ac-8cf0-467ba17c8e06-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xwtjj\" (UID: \"aa569267-90f6-47ac-8cf0-467ba17c8e06\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.131387 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0f706843-68b8-4598-9482-9c1361f60a63-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.131615 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-client-ca\") pod \"controller-manager-879f6c89f-fg2ch\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.131761 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-vhp6r" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.132169 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99-trusted-ca\") pod \"console-operator-58897d9998-rv854\" (UID: \"7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99\") " pod="openshift-console-operator/console-operator-58897d9998-rv854" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.132366 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a275db4b-1993-4d42-b623-2bc621a59042-config\") pod \"machine-approver-56656f9798-pf5mn\" (UID: \"a275db4b-1993-4d42-b623-2bc621a59042\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.132432 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0f706843-68b8-4598-9482-9c1361f60a63-audit-dir\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.132463 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-trusted-ca-bundle\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.132827 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac4ba204-8204-441b-9e16-5ec66a277675-config\") pod \"authentication-operator-69f744f599-vhhcx\" (UID: \"ac4ba204-8204-441b-9e16-5ec66a277675\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.133035 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33a6b5de-5ce1-4e23-b13d-feda684e25e0-serving-cert\") pod 
\"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.133192 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a26573b3-3899-4b81-ba01-fabea24142cc-metrics-certs\") pod \"router-default-5444994796-wtlwb\" (UID: \"a26573b3-3899-4b81-ba01-fabea24142cc\") " pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.133399 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f706843-68b8-4598-9482-9c1361f60a63-serving-cert\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.133772 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/995ba10c-c212-4afd-a95e-a03cbac4d08e-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v2gnh\" (UID: \"995ba10c-c212-4afd-a95e-a03cbac4d08e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.133940 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b464a77f-4bf7-4719-98c2-d07e5bde858a-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zjwgh\" (UID: \"b464a77f-4bf7-4719-98c2-d07e5bde858a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.133958 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/984ee29c-596a-48e8-80a4-cf8fda878e7f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8tgqm\" (UID: \"984ee29c-596a-48e8-80a4-cf8fda878e7f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.134093 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-pcp6j"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.134205 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a26573b3-3899-4b81-ba01-fabea24142cc-default-certificate\") pod \"router-default-5444994796-wtlwb\" (UID: \"a26573b3-3899-4b81-ba01-fabea24142cc\") " pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.134671 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a26573b3-3899-4b81-ba01-fabea24142cc-stats-auth\") pod \"router-default-5444994796-wtlwb\" (UID: \"a26573b3-3899-4b81-ba01-fabea24142cc\") " pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.134779 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/33a6b5de-5ce1-4e23-b13d-feda684e25e0-etcd-client\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.135024 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0f706843-68b8-4598-9482-9c1361f60a63-encryption-config\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.135792 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0863475f-c2fe-4b3c-abca-eaecb1e26561-serving-cert\") pod \"openshift-config-operator-7777fb866f-bj7cn\" (UID: \"0863475f-c2fe-4b3c-abca-eaecb1e26561\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.135872 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/213e4819-aa67-42ad-954c-5a461b370407-metrics-tls\") pod \"dns-operator-744455d44c-brktp\" (UID: \"213e4819-aa67-42ad-954c-5a461b370407\") " pod="openshift-dns-operator/dns-operator-744455d44c-brktp" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.136940 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.137123 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.137480 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.138452 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wb9r7"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.139464 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-drfrd"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.140286 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0f706843-68b8-4598-9482-9c1361f60a63-etcd-client\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.140671 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-pcp6j"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.141824 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-q8vvn"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.142901 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.143905 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-n2cvg"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.144848 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-n2cvg" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.144899 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-n2cvg"] Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.146472 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac4ba204-8204-441b-9e16-5ec66a277675-serving-cert\") pod \"authentication-operator-69f744f599-vhhcx\" (UID: \"ac4ba204-8204-441b-9e16-5ec66a277675\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.149710 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.168707 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.188206 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.208975 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.232437 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.248797 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.268521 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.288566 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.309254 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.329411 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.348567 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.369046 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.404893 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.411299 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.429123 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 
11:53:32.448578 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.468605 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.488640 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.508511 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.528659 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.549197 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.568601 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.588706 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.609162 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.637634 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.649496 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.668928 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.690055 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.709413 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.729206 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.749083 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.769042 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.788656 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.808815 4721 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.836428 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.870153 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.888884 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.908749 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.928771 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.950058 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.970062 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 02 11:53:32 crc kubenswrapper[4721]: I1202 11:53:32.988767 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.009814 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.027747 4721 request.go:700] Waited for 1.0019134s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-multus/secrets?fieldSelector=metadata.name%3Dmultus-ac-dockercfg-9lkdf&limit=500&resourceVersion=0 Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.030121 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.048576 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.069588 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.089063 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.110224 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.129943 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.149916 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.168746 4721 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-apiserver"/"serving-cert" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.199510 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.208706 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.229675 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.249656 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.268393 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.288609 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.309766 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.329597 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.351002 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.368883 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.390307 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.409869 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.429930 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.450249 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.490818 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.508659 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.529025 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.550108 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.569786 4721 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.588969 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.609331 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.629336 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.650901 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.669968 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.689744 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.710117 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.729455 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.750647 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.769228 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.803520 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjt7h\" (UniqueName: \"kubernetes.io/projected/b464a77f-4bf7-4719-98c2-d07e5bde858a-kube-api-access-bjt7h\") pod \"machine-api-operator-5694c8668f-zjwgh\" (UID: \"b464a77f-4bf7-4719-98c2-d07e5bde858a\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.824254 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zq4rh\" (UniqueName: \"kubernetes.io/projected/33a6b5de-5ce1-4e23-b13d-feda684e25e0-kube-api-access-zq4rh\") pod \"etcd-operator-b45778765-5cfgp\" (UID: \"33a6b5de-5ce1-4e23-b13d-feda684e25e0\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.843278 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qvjx\" (UniqueName: \"kubernetes.io/projected/aa569267-90f6-47ac-8cf0-467ba17c8e06-kube-api-access-6qvjx\") pod \"openshift-controller-manager-operator-756b6f6bc6-xwtjj\" (UID: \"aa569267-90f6-47ac-8cf0-467ba17c8e06\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.863061 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/995ba10c-c212-4afd-a95e-a03cbac4d08e-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v2gnh\" (UID: \"995ba10c-c212-4afd-a95e-a03cbac4d08e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.886549 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82qh8\" (UniqueName: \"kubernetes.io/projected/54beb89c-d2c3-42c8-a186-465ebcbb4e4b-kube-api-access-82qh8\") pod \"console-f9d7485db-tl5t9\" (UID: \"54beb89c-d2c3-42c8-a186-465ebcbb4e4b\") " pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.899125 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.902193 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnsb9\" (UniqueName: \"kubernetes.io/projected/ac4ba204-8204-441b-9e16-5ec66a277675-kube-api-access-tnsb9\") pod \"authentication-operator-69f744f599-vhhcx\" (UID: \"ac4ba204-8204-441b-9e16-5ec66a277675\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.924372 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.927530 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9556d\" (UniqueName: \"kubernetes.io/projected/a275db4b-1993-4d42-b623-2bc621a59042-kube-api-access-9556d\") pod \"machine-approver-56656f9798-pf5mn\" (UID: \"a275db4b-1993-4d42-b623-2bc621a59042\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.947165 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gv94b\" (UniqueName: \"kubernetes.io/projected/a26573b3-3899-4b81-ba01-fabea24142cc-kube-api-access-gv94b\") pod \"router-default-5444994796-wtlwb\" (UID: \"a26573b3-3899-4b81-ba01-fabea24142cc\") " pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.966670 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9g2v\" (UniqueName: \"kubernetes.io/projected/bc778281-fc4f-4592-a805-e03aac7643b1-kube-api-access-n9g2v\") pod \"openshift-apiserver-operator-796bbdcf4f-57995\" (UID: \"bc778281-fc4f-4592-a805-e03aac7643b1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.977246 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.981686 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:33 crc kubenswrapper[4721]: I1202 11:53:33.994385 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz9c5\" (UniqueName: \"kubernetes.io/projected/c02678af-0ebc-4689-ad5c-1300cb95439e-kube-api-access-bz9c5\") pod \"controller-manager-879f6c89f-fg2ch\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.006722 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.010084 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb2mc\" (UniqueName: \"kubernetes.io/projected/7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99-kube-api-access-xb2mc\") pod \"console-operator-58897d9998-rv854\" (UID: \"7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99\") " pod="openshift-console-operator/console-operator-58897d9998-rv854" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.028255 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc2hd\" (UniqueName: \"kubernetes.io/projected/213e4819-aa67-42ad-954c-5a461b370407-kube-api-access-rc2hd\") pod \"dns-operator-744455d44c-brktp\" (UID: \"213e4819-aa67-42ad-954c-5a461b370407\") " pod="openshift-dns-operator/dns-operator-744455d44c-brktp" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.030507 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.047008 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vfb4\" (UniqueName: \"kubernetes.io/projected/984ee29c-596a-48e8-80a4-cf8fda878e7f-kube-api-access-9vfb4\") pod \"cluster-image-registry-operator-dc59b4c8b-8tgqm\" (UID: \"984ee29c-596a-48e8-80a4-cf8fda878e7f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.047103 4721 request.go:700] Waited for 1.915990164s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa/token Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.068719 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jsf4\" (UniqueName: \"kubernetes.io/projected/0f706843-68b8-4598-9482-9c1361f60a63-kube-api-access-8jsf4\") pod \"apiserver-7bbb656c7d-qpb9s\" (UID: \"0f706843-68b8-4598-9482-9c1361f60a63\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.088882 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7z8s\" (UniqueName: \"kubernetes.io/projected/78c789c6-4ac0-4a7b-8cb6-b072909ccf03-kube-api-access-m7z8s\") pod \"downloads-7954f5f757-qx7nr\" (UID: \"78c789c6-4ac0-4a7b-8cb6-b072909ccf03\") " pod="openshift-console/downloads-7954f5f757-qx7nr" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.105914 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/984ee29c-596a-48e8-80a4-cf8fda878e7f-bound-sa-token\") 
pod \"cluster-image-registry-operator-dc59b4c8b-8tgqm\" (UID: \"984ee29c-596a-48e8-80a4-cf8fda878e7f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.109277 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.123592 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.132138 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj"] Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.132278 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.145009 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.146371 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-5cfgp"] Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.146555 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dpkw\" (UniqueName: \"kubernetes.io/projected/0863475f-c2fe-4b3c-abca-eaecb1e26561-kube-api-access-8dpkw\") pod \"openshift-config-operator-7777fb866f-bj7cn\" (UID: \"0863475f-c2fe-4b3c-abca-eaecb1e26561\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.161900 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m96xx\" (UniqueName: \"kubernetes.io/projected/1f4e7fae-d9a4-4f20-8658-4544860eaced-kube-api-access-m96xx\") pod \"route-controller-manager-6576b87f9c-kfmnl\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.162086 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-rv854" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.169274 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.170282 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.179594 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.190049 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.209310 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-brktp" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.209363 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.232082 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.246227 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh"] Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.249126 4721 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.267975 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.269184 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.277168 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-qx7nr" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.284801 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fg2ch"] Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.291905 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.308980 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 02 11:53:34 crc kubenswrapper[4721]: W1202 11:53:34.328084 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod995ba10c_c212_4afd_a95e_a03cbac4d08e.slice/crio-1ecdda3bd1163d4e23436ab77551c4f6a0c7715c85d9ef77044e3814a677231f WatchSource:0}: Error finding container 1ecdda3bd1163d4e23436ab77551c4f6a0c7715c85d9ef77044e3814a677231f: Status 404 returned error can't find the container with id 1ecdda3bd1163d4e23436ab77551c4f6a0c7715c85d9ef77044e3814a677231f Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.333188 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-vhhcx"] Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.353794 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-registry-tls\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.353836 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.353861 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sklvz\" (UniqueName: \"kubernetes.io/projected/fae5f727-ca2f-43b8-9ab9-b6817c537c19-kube-api-access-sklvz\") pod \"packageserver-d55dfcdfc-94q47\" (UID: \"fae5f727-ca2f-43b8-9ab9-b6817c537c19\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.353883 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/db4c5b64-6af8-4707-a667-b8f1e8435335-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rgmvn\" (UID: \"db4c5b64-6af8-4707-a667-b8f1e8435335\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.353915 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ed8a3365-c6aa-472a-bb41-10905fcabbf6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.353933 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-trusted-ca-bundle\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.353965 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-audit-dir\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.353984 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9-metrics-tls\") pod \"ingress-operator-5b745b69d9-clflg\" (UID: \"9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.354003 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b45dc373-92c5-48f1-9a77-309f0553079e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-ctcl6\" (UID: \"b45dc373-92c5-48f1-9a77-309f0553079e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.354096 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a8ef44c3-c7a8-4f21-b180-a6c3a9f7e292-cert\") pod \"ingress-canary-mkdl7\" (UID: \"a8ef44c3-c7a8-4f21-b180-a6c3a9f7e292\") " pod="openshift-ingress-canary/ingress-canary-mkdl7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 
11:53:34.354123 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.354145 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/30469e3e-7560-4574-8657-06fe673ed079-proxy-tls\") pod \"machine-config-controller-84d6567774-8wrt4\" (UID: \"30469e3e-7560-4574-8657-06fe673ed079\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.354256 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ed8a3365-c6aa-472a-bb41-10905fcabbf6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.354413 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356144 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzggc\" (UniqueName: \"kubernetes.io/projected/71be1dc6-76a8-4155-9900-1553a730a64a-kube-api-access-dzggc\") pod \"cluster-samples-operator-665b6dd947-xf8pm\" (UID: \"71be1dc6-76a8-4155-9900-1553a730a64a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xf8pm" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356172 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-bound-sa-token\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356208 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-encryption-config\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.355868 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356248 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a22a1d4a-00c0-4109-8eff-3492590aeb1a-srv-cert\") pod \"catalog-operator-68c6474976-857kt\" (UID: \"a22a1d4a-00c0-4109-8eff-3492590aeb1a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356283 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356315 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-config\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356332 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wscdz\" (UniqueName: \"kubernetes.io/projected/70b6509b-bcfc-4ad7-8d97-ce279333b048-kube-api-access-wscdz\") pod \"kube-storage-version-migrator-operator-b67b599dd-v8wzq\" (UID: \"70b6509b-bcfc-4ad7-8d97-ce279333b048\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356427 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpwqh\" (UniqueName: \"kubernetes.io/projected/a22a1d4a-00c0-4109-8eff-3492590aeb1a-kube-api-access-kpwqh\") pod \"catalog-operator-68c6474976-857kt\" (UID: \"a22a1d4a-00c0-4109-8eff-3492590aeb1a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356442 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/fae5f727-ca2f-43b8-9ab9-b6817c537c19-tmpfs\") pod \"packageserver-d55dfcdfc-94q47\" (UID: \"fae5f727-ca2f-43b8-9ab9-b6817c537c19\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356489 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qdks\" (UniqueName: \"kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-kube-api-access-5qdks\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356504 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7980e22-75e8-4495-afb8-f486065789c4-config-volume\") pod \"collect-profiles-29411265-f5k6f\" (UID: 
\"e7980e22-75e8-4495-afb8-f486065789c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356553 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/30469e3e-7560-4574-8657-06fe673ed079-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-8wrt4\" (UID: \"30469e3e-7560-4574-8657-06fe673ed079\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356571 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/18b11741-408b-4270-abd3-b43fd24149ab-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-drfrd\" (UID: \"18b11741-408b-4270-abd3-b43fd24149ab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-drfrd" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356587 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bc7ef085-1e7b-4bcc-b00b-42deb769d466-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-n2z7b\" (UID: \"bc7ef085-1e7b-4bcc-b00b-42deb769d466\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-n2z7b" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356622 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356639 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e528e899-db16-4e75-a55f-4eaa2462b90d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-rb6cf\" (UID: \"e528e899-db16-4e75-a55f-4eaa2462b90d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356657 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-audit-dir\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356672 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fae5f727-ca2f-43b8-9ab9-b6817c537c19-apiservice-cert\") pod \"packageserver-d55dfcdfc-94q47\" (UID: \"fae5f727-ca2f-43b8-9ab9-b6817c537c19\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356689 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/ed8a3365-c6aa-472a-bb41-10905fcabbf6-registry-certificates\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356705 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5754f3db-279a-4340-9354-8709b29e0398-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gtb8k\" (UID: \"5754f3db-279a-4340-9354-8709b29e0398\") " pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356733 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ts64\" (UniqueName: \"kubernetes.io/projected/bc7ef085-1e7b-4bcc-b00b-42deb769d466-kube-api-access-5ts64\") pod \"multus-admission-controller-857f4d67dd-n2z7b\" (UID: \"bc7ef085-1e7b-4bcc-b00b-42deb769d466\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-n2z7b" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356750 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc7tv\" (UniqueName: \"kubernetes.io/projected/18b11741-408b-4270-abd3-b43fd24149ab-kube-api-access-vc7tv\") pod \"control-plane-machine-set-operator-78cbb6b69f-drfrd\" (UID: \"18b11741-408b-4270-abd3-b43fd24149ab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-drfrd" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356767 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/db4c5b64-6af8-4707-a667-b8f1e8435335-srv-cert\") pod \"olm-operator-6b444d44fb-rgmvn\" (UID: \"db4c5b64-6af8-4707-a667-b8f1e8435335\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356785 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2063cb21-914f-4d0c-a6a7-e8edee1a4f9a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-dwqdt\" (UID: \"2063cb21-914f-4d0c-a6a7-e8edee1a4f9a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356810 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvpc5\" (UniqueName: \"kubernetes.io/projected/36ba50bc-f888-4f82-af4c-9879d8089273-kube-api-access-kvpc5\") pod \"service-ca-operator-777779d784-x7gjh\" (UID: \"36ba50bc-f888-4f82-af4c-9879d8089273\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356830 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fae5f727-ca2f-43b8-9ab9-b6817c537c19-webhook-cert\") pod \"packageserver-d55dfcdfc-94q47\" (UID: \"fae5f727-ca2f-43b8-9ab9-b6817c537c19\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356847 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-mpxrl\" (UniqueName: \"kubernetes.io/projected/2b5cafc4-348a-4e37-b2a1-4c51e5111d43-kube-api-access-mpxrl\") pod \"migrator-59844c95c7-66mxw\" (UID: \"2b5cafc4-348a-4e37-b2a1-4c51e5111d43\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66mxw" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356863 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b45dc373-92c5-48f1-9a77-309f0553079e-images\") pod \"machine-config-operator-74547568cd-ctcl6\" (UID: \"b45dc373-92c5-48f1-9a77-309f0553079e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356889 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-serving-cert\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356925 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356943 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2063cb21-914f-4d0c-a6a7-e8edee1a4f9a-config\") pod \"kube-apiserver-operator-766d6c64bb-dwqdt\" (UID: \"2063cb21-914f-4d0c-a6a7-e8edee1a4f9a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.356983 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.357001 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/71be1dc6-76a8-4155-9900-1553a730a64a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-xf8pm\" (UID: \"71be1dc6-76a8-4155-9900-1553a730a64a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xf8pm" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.357026 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.357042 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e528e899-db16-4e75-a55f-4eaa2462b90d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-rb6cf\" (UID: \"e528e899-db16-4e75-a55f-4eaa2462b90d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.357060 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70b6509b-bcfc-4ad7-8d97-ce279333b048-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-v8wzq\" (UID: \"70b6509b-bcfc-4ad7-8d97-ce279333b048\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.357121 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqm54\" (UniqueName: \"kubernetes.io/projected/df329555-e5bd-49b3-bce4-c995b4999755-kube-api-access-lqm54\") pod \"package-server-manager-789f6589d5-v6mzl\" (UID: \"df329555-e5bd-49b3-bce4-c995b4999755\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.357150 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36ba50bc-f888-4f82-af4c-9879d8089273-config\") pod \"service-ca-operator-777779d784-x7gjh\" (UID: \"36ba50bc-f888-4f82-af4c-9879d8089273\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.358546 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dfxx\" (UniqueName: \"kubernetes.io/projected/5754f3db-279a-4340-9354-8709b29e0398-kube-api-access-6dfxx\") pod \"marketplace-operator-79b997595-gtb8k\" (UID: \"5754f3db-279a-4340-9354-8709b29e0398\") " pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.358580 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6wkx\" (UniqueName: \"kubernetes.io/projected/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-kube-api-access-l6wkx\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.358618 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7980e22-75e8-4495-afb8-f486065789c4-secret-volume\") pod \"collect-profiles-29411265-f5k6f\" (UID: \"e7980e22-75e8-4495-afb8-f486065789c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.358643 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-clflg\" (UID: \"9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" Dec 02 11:53:34 crc 
kubenswrapper[4721]: I1202 11:53:34.358666 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-node-pullsecrets\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.359114 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.359143 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jstd8\" (UniqueName: \"kubernetes.io/projected/db4c5b64-6af8-4707-a667-b8f1e8435335-kube-api-access-jstd8\") pod \"olm-operator-6b444d44fb-rgmvn\" (UID: \"db4c5b64-6af8-4707-a667-b8f1e8435335\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.359166 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwj8s\" (UniqueName: \"kubernetes.io/projected/9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9-kube-api-access-hwj8s\") pod \"ingress-operator-5b745b69d9-clflg\" (UID: \"9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.359244 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czztn\" (UniqueName: \"kubernetes.io/projected/30469e3e-7560-4574-8657-06fe673ed079-kube-api-access-czztn\") pod \"machine-config-controller-84d6567774-8wrt4\" (UID: \"30469e3e-7560-4574-8657-06fe673ed079\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.359262 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-image-import-ca\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.359278 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmtzj\" (UniqueName: \"kubernetes.io/projected/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-kube-api-access-xmtzj\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.359318 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/5cd2ab71-ee95-4e70-a189-ef6152172de4-signing-key\") pod \"service-ca-9c57cc56f-bl9q7\" (UID: \"5cd2ab71-ee95-4e70-a189-ef6152172de4\") " pod="openshift-service-ca/service-ca-9c57cc56f-bl9q7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.359351 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-audit-policies\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.359546 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4slm\" (UniqueName: \"kubernetes.io/projected/b45dc373-92c5-48f1-9a77-309f0553079e-kube-api-access-b4slm\") pod \"machine-config-operator-74547568cd-ctcl6\" (UID: \"b45dc373-92c5-48f1-9a77-309f0553079e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.359703 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a22a1d4a-00c0-4109-8eff-3492590aeb1a-profile-collector-cert\") pod \"catalog-operator-68c6474976-857kt\" (UID: \"a22a1d4a-00c0-4109-8eff-3492590aeb1a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.359763 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9-trusted-ca\") pod \"ingress-operator-5b745b69d9-clflg\" (UID: \"9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.360233 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ed8a3365-c6aa-472a-bb41-10905fcabbf6-trusted-ca\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.360277 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/df329555-e5bd-49b3-bce4-c995b4999755-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-v6mzl\" (UID: \"df329555-e5bd-49b3-bce4-c995b4999755\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.360323 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2063cb21-914f-4d0c-a6a7-e8edee1a4f9a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-dwqdt\" (UID: \"2063cb21-914f-4d0c-a6a7-e8edee1a4f9a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.360465 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e528e899-db16-4e75-a55f-4eaa2462b90d-config\") pod \"kube-controller-manager-operator-78b949d7b-rb6cf\" (UID: \"e528e899-db16-4e75-a55f-4eaa2462b90d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf" Dec 02 
11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.360580 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-etcd-serving-ca\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.360669 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/5cd2ab71-ee95-4e70-a189-ef6152172de4-signing-cabundle\") pod \"service-ca-9c57cc56f-bl9q7\" (UID: \"5cd2ab71-ee95-4e70-a189-ef6152172de4\") " pod="openshift-service-ca/service-ca-9c57cc56f-bl9q7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.360716 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-etcd-client\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.360750 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-audit\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.360787 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36ba50bc-f888-4f82-af4c-9879d8089273-serving-cert\") pod \"service-ca-operator-777779d784-x7gjh\" (UID: \"36ba50bc-f888-4f82-af4c-9879d8089273\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.360836 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5754f3db-279a-4340-9354-8709b29e0398-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gtb8k\" (UID: \"5754f3db-279a-4340-9354-8709b29e0398\") " pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.360866 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.361011 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.361105 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.361154 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4twzf\" (UniqueName: \"kubernetes.io/projected/a8ef44c3-c7a8-4f21-b180-a6c3a9f7e292-kube-api-access-4twzf\") pod \"ingress-canary-mkdl7\" (UID: \"a8ef44c3-c7a8-4f21-b180-a6c3a9f7e292\") " pod="openshift-ingress-canary/ingress-canary-mkdl7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.361174 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kcps\" (UniqueName: \"kubernetes.io/projected/5cd2ab71-ee95-4e70-a189-ef6152172de4-kube-api-access-6kcps\") pod \"service-ca-9c57cc56f-bl9q7\" (UID: \"5cd2ab71-ee95-4e70-a189-ef6152172de4\") " pod="openshift-service-ca/service-ca-9c57cc56f-bl9q7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.361205 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70b6509b-bcfc-4ad7-8d97-ce279333b048-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-v8wzq\" (UID: \"70b6509b-bcfc-4ad7-8d97-ce279333b048\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.361281 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7fnp\" (UniqueName: \"kubernetes.io/projected/e7980e22-75e8-4495-afb8-f486065789c4-kube-api-access-t7fnp\") pod \"collect-profiles-29411265-f5k6f\" (UID: \"e7980e22-75e8-4495-afb8-f486065789c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.361311 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b45dc373-92c5-48f1-9a77-309f0553079e-proxy-tls\") pod \"machine-config-operator-74547568cd-ctcl6\" (UID: \"b45dc373-92c5-48f1-9a77-309f0553079e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" Dec 02 11:53:34 crc kubenswrapper[4721]: E1202 11:53:34.361338 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:34.861326762 +0000 UTC m=+144.599877111 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.373087 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.436812 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zjwgh"] Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.461858 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462092 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e528e899-db16-4e75-a55f-4eaa2462b90d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-rb6cf\" (UID: \"e528e899-db16-4e75-a55f-4eaa2462b90d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462221 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462386 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-audit-dir\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462412 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fae5f727-ca2f-43b8-9ab9-b6817c537c19-apiservice-cert\") pod \"packageserver-d55dfcdfc-94q47\" (UID: \"fae5f727-ca2f-43b8-9ab9-b6817c537c19\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462432 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ed8a3365-c6aa-472a-bb41-10905fcabbf6-registry-certificates\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462451 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5754f3db-279a-4340-9354-8709b29e0398-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gtb8k\" (UID: \"5754f3db-279a-4340-9354-8709b29e0398\") " pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462471 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ts64\" (UniqueName: \"kubernetes.io/projected/bc7ef085-1e7b-4bcc-b00b-42deb769d466-kube-api-access-5ts64\") pod \"multus-admission-controller-857f4d67dd-n2z7b\" (UID: \"bc7ef085-1e7b-4bcc-b00b-42deb769d466\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-n2z7b" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462494 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc7tv\" (UniqueName: \"kubernetes.io/projected/18b11741-408b-4270-abd3-b43fd24149ab-kube-api-access-vc7tv\") pod \"control-plane-machine-set-operator-78cbb6b69f-drfrd\" (UID: \"18b11741-408b-4270-abd3-b43fd24149ab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-drfrd" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462513 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/db4c5b64-6af8-4707-a667-b8f1e8435335-srv-cert\") pod \"olm-operator-6b444d44fb-rgmvn\" (UID: \"db4c5b64-6af8-4707-a667-b8f1e8435335\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462533 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2063cb21-914f-4d0c-a6a7-e8edee1a4f9a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-dwqdt\" (UID: \"2063cb21-914f-4d0c-a6a7-e8edee1a4f9a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462554 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpxrl\" (UniqueName: \"kubernetes.io/projected/2b5cafc4-348a-4e37-b2a1-4c51e5111d43-kube-api-access-mpxrl\") pod \"migrator-59844c95c7-66mxw\" (UID: \"2b5cafc4-348a-4e37-b2a1-4c51e5111d43\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66mxw" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462574 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b45dc373-92c5-48f1-9a77-309f0553079e-images\") pod \"machine-config-operator-74547568cd-ctcl6\" (UID: \"b45dc373-92c5-48f1-9a77-309f0553079e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462595 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvpc5\" (UniqueName: \"kubernetes.io/projected/36ba50bc-f888-4f82-af4c-9879d8089273-kube-api-access-kvpc5\") pod \"service-ca-operator-777779d784-x7gjh\" (UID: \"36ba50bc-f888-4f82-af4c-9879d8089273\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462613 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/fae5f727-ca2f-43b8-9ab9-b6817c537c19-webhook-cert\") pod \"packageserver-d55dfcdfc-94q47\" (UID: \"fae5f727-ca2f-43b8-9ab9-b6817c537c19\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462633 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-serving-cert\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462654 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/77ff2b86-bf1c-4eda-abe0-dd8692cca140-certs\") pod \"machine-config-server-vhp6r\" (UID: \"77ff2b86-bf1c-4eda-abe0-dd8692cca140\") " pod="openshift-machine-config-operator/machine-config-server-vhp6r" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462678 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrvb5\" (UniqueName: \"kubernetes.io/projected/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-kube-api-access-hrvb5\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462702 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.462725 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/76d8c7b7-9f4b-4e54-9466-75bef179ce83-metrics-tls\") pod \"dns-default-n2cvg\" (UID: \"76d8c7b7-9f4b-4e54-9466-75bef179ce83\") " pod="openshift-dns/dns-default-n2cvg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463049 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2063cb21-914f-4d0c-a6a7-e8edee1a4f9a-config\") pod \"kube-apiserver-operator-766d6c64bb-dwqdt\" (UID: \"2063cb21-914f-4d0c-a6a7-e8edee1a4f9a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463077 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/71be1dc6-76a8-4155-9900-1553a730a64a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-xf8pm\" (UID: \"71be1dc6-76a8-4155-9900-1553a730a64a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xf8pm" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463109 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463129 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463150 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e528e899-db16-4e75-a55f-4eaa2462b90d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-rb6cf\" (UID: \"e528e899-db16-4e75-a55f-4eaa2462b90d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463171 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70b6509b-bcfc-4ad7-8d97-ce279333b048-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-v8wzq\" (UID: \"70b6509b-bcfc-4ad7-8d97-ce279333b048\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463194 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36ba50bc-f888-4f82-af4c-9879d8089273-config\") pod \"service-ca-operator-777779d784-x7gjh\" (UID: \"36ba50bc-f888-4f82-af4c-9879d8089273\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463217 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqm54\" (UniqueName: \"kubernetes.io/projected/df329555-e5bd-49b3-bce4-c995b4999755-kube-api-access-lqm54\") pod \"package-server-manager-789f6589d5-v6mzl\" (UID: \"df329555-e5bd-49b3-bce4-c995b4999755\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463238 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n88c8\" (UniqueName: \"kubernetes.io/projected/76d8c7b7-9f4b-4e54-9466-75bef179ce83-kube-api-access-n88c8\") pod \"dns-default-n2cvg\" (UID: \"76d8c7b7-9f4b-4e54-9466-75bef179ce83\") " pod="openshift-dns/dns-default-n2cvg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463257 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6wkx\" (UniqueName: \"kubernetes.io/projected/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-kube-api-access-l6wkx\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463278 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7980e22-75e8-4495-afb8-f486065789c4-secret-volume\") pod \"collect-profiles-29411265-f5k6f\" (UID: \"e7980e22-75e8-4495-afb8-f486065789c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" Dec 02 11:53:34 
crc kubenswrapper[4721]: I1202 11:53:34.463300 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-clflg\" (UID: \"9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463323 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dfxx\" (UniqueName: \"kubernetes.io/projected/5754f3db-279a-4340-9354-8709b29e0398-kube-api-access-6dfxx\") pod \"marketplace-operator-79b997595-gtb8k\" (UID: \"5754f3db-279a-4340-9354-8709b29e0398\") " pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463388 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-node-pullsecrets\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463417 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jstd8\" (UniqueName: \"kubernetes.io/projected/db4c5b64-6af8-4707-a667-b8f1e8435335-kube-api-access-jstd8\") pod \"olm-operator-6b444d44fb-rgmvn\" (UID: \"db4c5b64-6af8-4707-a667-b8f1e8435335\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463436 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwj8s\" (UniqueName: \"kubernetes.io/projected/9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9-kube-api-access-hwj8s\") pod \"ingress-operator-5b745b69d9-clflg\" (UID: \"9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463458 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463482 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czztn\" (UniqueName: \"kubernetes.io/projected/30469e3e-7560-4574-8657-06fe673ed079-kube-api-access-czztn\") pod \"machine-config-controller-84d6567774-8wrt4\" (UID: \"30469e3e-7560-4574-8657-06fe673ed079\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463504 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/77ff2b86-bf1c-4eda-abe0-dd8692cca140-node-bootstrap-token\") pod \"machine-config-server-vhp6r\" (UID: \"77ff2b86-bf1c-4eda-abe0-dd8692cca140\") " pod="openshift-machine-config-operator/machine-config-server-vhp6r" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463528 4721 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-image-import-ca\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463551 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmtzj\" (UniqueName: \"kubernetes.io/projected/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-kube-api-access-xmtzj\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463571 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/5cd2ab71-ee95-4e70-a189-ef6152172de4-signing-key\") pod \"service-ca-9c57cc56f-bl9q7\" (UID: \"5cd2ab71-ee95-4e70-a189-ef6152172de4\") " pod="openshift-service-ca/service-ca-9c57cc56f-bl9q7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463592 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-audit-policies\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463619 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4slm\" (UniqueName: \"kubernetes.io/projected/b45dc373-92c5-48f1-9a77-309f0553079e-kube-api-access-b4slm\") pod \"machine-config-operator-74547568cd-ctcl6\" (UID: \"b45dc373-92c5-48f1-9a77-309f0553079e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463640 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a22a1d4a-00c0-4109-8eff-3492590aeb1a-profile-collector-cert\") pod \"catalog-operator-68c6474976-857kt\" (UID: \"a22a1d4a-00c0-4109-8eff-3492590aeb1a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463662 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9-trusted-ca\") pod \"ingress-operator-5b745b69d9-clflg\" (UID: \"9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463683 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-plugins-dir\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463717 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-mountpoint-dir\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " 
pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463813 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ed8a3365-c6aa-472a-bb41-10905fcabbf6-trusted-ca\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463836 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/df329555-e5bd-49b3-bce4-c995b4999755-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-v6mzl\" (UID: \"df329555-e5bd-49b3-bce4-c995b4999755\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463858 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2063cb21-914f-4d0c-a6a7-e8edee1a4f9a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-dwqdt\" (UID: \"2063cb21-914f-4d0c-a6a7-e8edee1a4f9a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463881 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e528e899-db16-4e75-a55f-4eaa2462b90d-config\") pod \"kube-controller-manager-operator-78b949d7b-rb6cf\" (UID: \"e528e899-db16-4e75-a55f-4eaa2462b90d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463901 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/5cd2ab71-ee95-4e70-a189-ef6152172de4-signing-cabundle\") pod \"service-ca-9c57cc56f-bl9q7\" (UID: \"5cd2ab71-ee95-4e70-a189-ef6152172de4\") " pod="openshift-service-ca/service-ca-9c57cc56f-bl9q7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463923 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-etcd-serving-ca\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463946 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-etcd-client\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.463968 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-audit\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464001 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/36ba50bc-f888-4f82-af4c-9879d8089273-serving-cert\") pod \"service-ca-operator-777779d784-x7gjh\" (UID: \"36ba50bc-f888-4f82-af4c-9879d8089273\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464022 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5754f3db-279a-4340-9354-8709b29e0398-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gtb8k\" (UID: \"5754f3db-279a-4340-9354-8709b29e0398\") " pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464046 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464071 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-socket-dir\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464097 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kcps\" (UniqueName: \"kubernetes.io/projected/5cd2ab71-ee95-4e70-a189-ef6152172de4-kube-api-access-6kcps\") pod \"service-ca-9c57cc56f-bl9q7\" (UID: \"5cd2ab71-ee95-4e70-a189-ef6152172de4\") " pod="openshift-service-ca/service-ca-9c57cc56f-bl9q7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464118 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70b6509b-bcfc-4ad7-8d97-ce279333b048-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-v8wzq\" (UID: \"70b6509b-bcfc-4ad7-8d97-ce279333b048\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464149 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464171 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4twzf\" (UniqueName: \"kubernetes.io/projected/a8ef44c3-c7a8-4f21-b180-a6c3a9f7e292-kube-api-access-4twzf\") pod \"ingress-canary-mkdl7\" (UID: \"a8ef44c3-c7a8-4f21-b180-a6c3a9f7e292\") " pod="openshift-ingress-canary/ingress-canary-mkdl7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464193 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7fnp\" (UniqueName: \"kubernetes.io/projected/e7980e22-75e8-4495-afb8-f486065789c4-kube-api-access-t7fnp\") 
pod \"collect-profiles-29411265-f5k6f\" (UID: \"e7980e22-75e8-4495-afb8-f486065789c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464217 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b45dc373-92c5-48f1-9a77-309f0553079e-proxy-tls\") pod \"machine-config-operator-74547568cd-ctcl6\" (UID: \"b45dc373-92c5-48f1-9a77-309f0553079e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464239 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464259 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sklvz\" (UniqueName: \"kubernetes.io/projected/fae5f727-ca2f-43b8-9ab9-b6817c537c19-kube-api-access-sklvz\") pod \"packageserver-d55dfcdfc-94q47\" (UID: \"fae5f727-ca2f-43b8-9ab9-b6817c537c19\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464281 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-registry-tls\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464302 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/db4c5b64-6af8-4707-a667-b8f1e8435335-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rgmvn\" (UID: \"db4c5b64-6af8-4707-a667-b8f1e8435335\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464322 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ed8a3365-c6aa-472a-bb41-10905fcabbf6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464363 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-trusted-ca-bundle\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464388 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmnnc\" (UniqueName: \"kubernetes.io/projected/77ff2b86-bf1c-4eda-abe0-dd8692cca140-kube-api-access-gmnnc\") pod \"machine-config-server-vhp6r\" (UID: \"77ff2b86-bf1c-4eda-abe0-dd8692cca140\") " pod="openshift-machine-config-operator/machine-config-server-vhp6r" 
Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464412 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-audit-dir\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464432 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9-metrics-tls\") pod \"ingress-operator-5b745b69d9-clflg\" (UID: \"9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464453 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b45dc373-92c5-48f1-9a77-309f0553079e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-ctcl6\" (UID: \"b45dc373-92c5-48f1-9a77-309f0553079e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464475 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a8ef44c3-c7a8-4f21-b180-a6c3a9f7e292-cert\") pod \"ingress-canary-mkdl7\" (UID: \"a8ef44c3-c7a8-4f21-b180-a6c3a9f7e292\") " pod="openshift-ingress-canary/ingress-canary-mkdl7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464497 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-registration-dir\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464518 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-csi-data-dir\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464538 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/76d8c7b7-9f4b-4e54-9466-75bef179ce83-config-volume\") pod \"dns-default-n2cvg\" (UID: \"76d8c7b7-9f4b-4e54-9466-75bef179ce83\") " pod="openshift-dns/dns-default-n2cvg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464567 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464588 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/30469e3e-7560-4574-8657-06fe673ed079-proxy-tls\") pod 
\"machine-config-controller-84d6567774-8wrt4\" (UID: \"30469e3e-7560-4574-8657-06fe673ed079\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464610 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ed8a3365-c6aa-472a-bb41-10905fcabbf6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464633 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464656 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzggc\" (UniqueName: \"kubernetes.io/projected/71be1dc6-76a8-4155-9900-1553a730a64a-kube-api-access-dzggc\") pod \"cluster-samples-operator-665b6dd947-xf8pm\" (UID: \"71be1dc6-76a8-4155-9900-1553a730a64a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xf8pm" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464680 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-bound-sa-token\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464714 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-encryption-config\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464737 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a22a1d4a-00c0-4109-8eff-3492590aeb1a-srv-cert\") pod \"catalog-operator-68c6474976-857kt\" (UID: \"a22a1d4a-00c0-4109-8eff-3492590aeb1a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464763 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464785 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wscdz\" (UniqueName: \"kubernetes.io/projected/70b6509b-bcfc-4ad7-8d97-ce279333b048-kube-api-access-wscdz\") pod \"kube-storage-version-migrator-operator-b67b599dd-v8wzq\" (UID: \"70b6509b-bcfc-4ad7-8d97-ce279333b048\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.464807 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-config\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.466182 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpwqh\" (UniqueName: \"kubernetes.io/projected/a22a1d4a-00c0-4109-8eff-3492590aeb1a-kube-api-access-kpwqh\") pod \"catalog-operator-68c6474976-857kt\" (UID: \"a22a1d4a-00c0-4109-8eff-3492590aeb1a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.466224 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qdks\" (UniqueName: \"kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-kube-api-access-5qdks\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.466251 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7980e22-75e8-4495-afb8-f486065789c4-config-volume\") pod \"collect-profiles-29411265-f5k6f\" (UID: \"e7980e22-75e8-4495-afb8-f486065789c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.466275 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/fae5f727-ca2f-43b8-9ab9-b6817c537c19-tmpfs\") pod \"packageserver-d55dfcdfc-94q47\" (UID: \"fae5f727-ca2f-43b8-9ab9-b6817c537c19\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.466326 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bc7ef085-1e7b-4bcc-b00b-42deb769d466-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-n2z7b\" (UID: \"bc7ef085-1e7b-4bcc-b00b-42deb769d466\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-n2z7b" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.466378 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/30469e3e-7560-4574-8657-06fe673ed079-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-8wrt4\" (UID: \"30469e3e-7560-4574-8657-06fe673ed079\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.466403 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/18b11741-408b-4270-abd3-b43fd24149ab-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-drfrd\" (UID: \"18b11741-408b-4270-abd3-b43fd24149ab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-drfrd" Dec 02 
11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.467461 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-audit-dir\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.468619 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ed8a3365-c6aa-472a-bb41-10905fcabbf6-trusted-ca\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.471569 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ed8a3365-c6aa-472a-bb41-10905fcabbf6-registry-certificates\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.472549 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-audit-dir\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.472598 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/18b11741-408b-4270-abd3-b43fd24149ab-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-drfrd\" (UID: \"18b11741-408b-4270-abd3-b43fd24149ab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-drfrd" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.473229 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.473564 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-audit-policies\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.473805 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.474502 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.474886 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9-metrics-tls\") pod \"ingress-operator-5b745b69d9-clflg\" (UID: \"9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.475380 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fae5f727-ca2f-43b8-9ab9-b6817c537c19-webhook-cert\") pod \"packageserver-d55dfcdfc-94q47\" (UID: \"fae5f727-ca2f-43b8-9ab9-b6817c537c19\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.475532 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-config\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.475730 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-image-import-ca\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.476181 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ed8a3365-c6aa-472a-bb41-10905fcabbf6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.476493 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.476886 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9-trusted-ca\") pod \"ingress-operator-5b745b69d9-clflg\" (UID: \"9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.476959 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b45dc373-92c5-48f1-9a77-309f0553079e-images\") pod \"machine-config-operator-74547568cd-ctcl6\" (UID: \"b45dc373-92c5-48f1-9a77-309f0553079e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.479681 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/30469e3e-7560-4574-8657-06fe673ed079-proxy-tls\") pod \"machine-config-controller-84d6567774-8wrt4\" (UID: \"30469e3e-7560-4574-8657-06fe673ed079\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.480434 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/db4c5b64-6af8-4707-a667-b8f1e8435335-srv-cert\") pod \"olm-operator-6b444d44fb-rgmvn\" (UID: \"db4c5b64-6af8-4707-a667-b8f1e8435335\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.481378 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e528e899-db16-4e75-a55f-4eaa2462b90d-config\") pod \"kube-controller-manager-operator-78b949d7b-rb6cf\" (UID: \"e528e899-db16-4e75-a55f-4eaa2462b90d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.482239 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70b6509b-bcfc-4ad7-8d97-ce279333b048-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-v8wzq\" (UID: \"70b6509b-bcfc-4ad7-8d97-ce279333b048\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.482478 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/5cd2ab71-ee95-4e70-a189-ef6152172de4-signing-cabundle\") pod \"service-ca-9c57cc56f-bl9q7\" (UID: \"5cd2ab71-ee95-4e70-a189-ef6152172de4\") " pod="openshift-service-ca/service-ca-9c57cc56f-bl9q7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.482542 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5754f3db-279a-4340-9354-8709b29e0398-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gtb8k\" (UID: \"5754f3db-279a-4340-9354-8709b29e0398\") " pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.482711 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-etcd-serving-ca\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.483221 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36ba50bc-f888-4f82-af4c-9879d8089273-config\") pod \"service-ca-operator-777779d784-x7gjh\" (UID: \"36ba50bc-f888-4f82-af4c-9879d8089273\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.483400 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-trusted-ca-bundle\") pod \"apiserver-76f77b778f-q8vvn\" (UID: 
\"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.484826 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7980e22-75e8-4495-afb8-f486065789c4-config-volume\") pod \"collect-profiles-29411265-f5k6f\" (UID: \"e7980e22-75e8-4495-afb8-f486065789c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.485661 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/fae5f727-ca2f-43b8-9ab9-b6817c537c19-tmpfs\") pod \"packageserver-d55dfcdfc-94q47\" (UID: \"fae5f727-ca2f-43b8-9ab9-b6817c537c19\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.486172 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b45dc373-92c5-48f1-9a77-309f0553079e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-ctcl6\" (UID: \"b45dc373-92c5-48f1-9a77-309f0553079e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.489610 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2063cb21-914f-4d0c-a6a7-e8edee1a4f9a-config\") pod \"kube-apiserver-operator-766d6c64bb-dwqdt\" (UID: \"2063cb21-914f-4d0c-a6a7-e8edee1a4f9a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.495571 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: E1202 11:53:34.495662 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:34.995643371 +0000 UTC m=+144.734193720 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.496056 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-audit\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.497095 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-node-pullsecrets\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.497774 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/30469e3e-7560-4574-8657-06fe673ed079-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-8wrt4\" (UID: \"30469e3e-7560-4574-8657-06fe673ed079\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.506108 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b45dc373-92c5-48f1-9a77-309f0553079e-proxy-tls\") pod \"machine-config-operator-74547568cd-ctcl6\" (UID: \"b45dc373-92c5-48f1-9a77-309f0553079e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.518901 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e528e899-db16-4e75-a55f-4eaa2462b90d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-rb6cf\" (UID: \"e528e899-db16-4e75-a55f-4eaa2462b90d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.520143 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bc7ef085-1e7b-4bcc-b00b-42deb769d466-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-n2z7b\" (UID: \"bc7ef085-1e7b-4bcc-b00b-42deb769d466\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-n2z7b" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.525514 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.525867 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/36ba50bc-f888-4f82-af4c-9879d8089273-serving-cert\") pod \"service-ca-operator-777779d784-x7gjh\" (UID: \"36ba50bc-f888-4f82-af4c-9879d8089273\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.526479 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzggc\" (UniqueName: \"kubernetes.io/projected/71be1dc6-76a8-4155-9900-1553a730a64a-kube-api-access-dzggc\") pod \"cluster-samples-operator-665b6dd947-xf8pm\" (UID: \"71be1dc6-76a8-4155-9900-1553a730a64a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xf8pm" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.528303 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2063cb21-914f-4d0c-a6a7-e8edee1a4f9a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-dwqdt\" (UID: \"2063cb21-914f-4d0c-a6a7-e8edee1a4f9a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.529232 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-etcd-client\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.529561 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70b6509b-bcfc-4ad7-8d97-ce279333b048-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-v8wzq\" (UID: \"70b6509b-bcfc-4ad7-8d97-ce279333b048\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.530724 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a22a1d4a-00c0-4109-8eff-3492590aeb1a-srv-cert\") pod \"catalog-operator-68c6474976-857kt\" (UID: \"a22a1d4a-00c0-4109-8eff-3492590aeb1a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.531190 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-registry-tls\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.531764 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/5cd2ab71-ee95-4e70-a189-ef6152172de4-signing-key\") pod \"service-ca-9c57cc56f-bl9q7\" (UID: \"5cd2ab71-ee95-4e70-a189-ef6152172de4\") " pod="openshift-service-ca/service-ca-9c57cc56f-bl9q7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.532235 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a22a1d4a-00c0-4109-8eff-3492590aeb1a-profile-collector-cert\") pod \"catalog-operator-68c6474976-857kt\" (UID: \"a22a1d4a-00c0-4109-8eff-3492590aeb1a\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.532556 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.532793 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.533190 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ed8a3365-c6aa-472a-bb41-10905fcabbf6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.533995 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/db4c5b64-6af8-4707-a667-b8f1e8435335-profile-collector-cert\") pod \"olm-operator-6b444d44fb-rgmvn\" (UID: \"db4c5b64-6af8-4707-a667-b8f1e8435335\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.534020 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-encryption-config\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.534406 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-serving-cert\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.535675 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.536006 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5754f3db-279a-4340-9354-8709b29e0398-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gtb8k\" (UID: \"5754f3db-279a-4340-9354-8709b29e0398\") " pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.536187 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.536293 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a8ef44c3-c7a8-4f21-b180-a6c3a9f7e292-cert\") pod \"ingress-canary-mkdl7\" (UID: \"a8ef44c3-c7a8-4f21-b180-a6c3a9f7e292\") " pod="openshift-ingress-canary/ingress-canary-mkdl7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.536412 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.536706 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/df329555-e5bd-49b3-bce4-c995b4999755-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-v6mzl\" (UID: \"df329555-e5bd-49b3-bce4-c995b4999755\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.537054 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/71be1dc6-76a8-4155-9900-1553a730a64a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-xf8pm\" (UID: \"71be1dc6-76a8-4155-9900-1553a730a64a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xf8pm" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.537795 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7980e22-75e8-4495-afb8-f486065789c4-secret-volume\") pod \"collect-profiles-29411265-f5k6f\" (UID: \"e7980e22-75e8-4495-afb8-f486065789c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.538667 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fae5f727-ca2f-43b8-9ab9-b6817c537c19-apiservice-cert\") pod \"packageserver-d55dfcdfc-94q47\" (UID: \"fae5f727-ca2f-43b8-9ab9-b6817c537c19\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.539100 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6wkx\" (UniqueName: \"kubernetes.io/projected/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-kube-api-access-l6wkx\") pod \"oauth-openshift-558db77b4-wb9r7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.541076 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" 
event={"ID":"ac4ba204-8204-441b-9e16-5ec66a277675","Type":"ContainerStarted","Data":"5f446861879d7b58fb3f0386b58aad464d81fd0297355901f00e07bfd4139fba"} Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.553166 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dfxx\" (UniqueName: \"kubernetes.io/projected/5754f3db-279a-4340-9354-8709b29e0398-kube-api-access-6dfxx\") pod \"marketplace-operator-79b997595-gtb8k\" (UID: \"5754f3db-279a-4340-9354-8709b29e0398\") " pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.566587 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh" event={"ID":"995ba10c-c212-4afd-a95e-a03cbac4d08e","Type":"ContainerStarted","Data":"1ecdda3bd1163d4e23436ab77551c4f6a0c7715c85d9ef77044e3814a677231f"} Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.567307 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/77ff2b86-bf1c-4eda-abe0-dd8692cca140-certs\") pod \"machine-config-server-vhp6r\" (UID: \"77ff2b86-bf1c-4eda-abe0-dd8692cca140\") " pod="openshift-machine-config-operator/machine-config-server-vhp6r" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.567423 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrvb5\" (UniqueName: \"kubernetes.io/projected/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-kube-api-access-hrvb5\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.567456 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/76d8c7b7-9f4b-4e54-9466-75bef179ce83-metrics-tls\") pod \"dns-default-n2cvg\" (UID: \"76d8c7b7-9f4b-4e54-9466-75bef179ce83\") " pod="openshift-dns/dns-default-n2cvg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.567509 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n88c8\" (UniqueName: \"kubernetes.io/projected/76d8c7b7-9f4b-4e54-9466-75bef179ce83-kube-api-access-n88c8\") pod \"dns-default-n2cvg\" (UID: \"76d8c7b7-9f4b-4e54-9466-75bef179ce83\") " pod="openshift-dns/dns-default-n2cvg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.567572 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/77ff2b86-bf1c-4eda-abe0-dd8692cca140-node-bootstrap-token\") pod \"machine-config-server-vhp6r\" (UID: \"77ff2b86-bf1c-4eda-abe0-dd8692cca140\") " pod="openshift-machine-config-operator/machine-config-server-vhp6r" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.567621 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-plugins-dir\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.567658 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-mountpoint-dir\") pod 
\"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.567698 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-socket-dir\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.567723 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.567782 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmnnc\" (UniqueName: \"kubernetes.io/projected/77ff2b86-bf1c-4eda-abe0-dd8692cca140-kube-api-access-gmnnc\") pod \"machine-config-server-vhp6r\" (UID: \"77ff2b86-bf1c-4eda-abe0-dd8692cca140\") " pod="openshift-machine-config-operator/machine-config-server-vhp6r" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.567814 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-registration-dir\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.567837 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-csi-data-dir\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.567857 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/76d8c7b7-9f4b-4e54-9466-75bef179ce83-config-volume\") pod \"dns-default-n2cvg\" (UID: \"76d8c7b7-9f4b-4e54-9466-75bef179ce83\") " pod="openshift-dns/dns-default-n2cvg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.568169 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvpc5\" (UniqueName: \"kubernetes.io/projected/36ba50bc-f888-4f82-af4c-9879d8089273-kube-api-access-kvpc5\") pod \"service-ca-operator-777779d784-x7gjh\" (UID: \"36ba50bc-f888-4f82-af4c-9879d8089273\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.568179 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-mountpoint-dir\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.569843 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: 
\"kubernetes.io/host-path/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-plugins-dir\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.569945 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" event={"ID":"b464a77f-4bf7-4719-98c2-d07e5bde858a","Type":"ContainerStarted","Data":"753d53b5575f47c2cf307c1cac5f4aeef21c70c2b495baf06a1eea4ccc45e0e2"} Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.570149 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-socket-dir\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: E1202 11:53:34.570437 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:35.070423007 +0000 UTC m=+144.808973356 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.570914 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-registration-dir\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.571066 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-csi-data-dir\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.572319 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-wtlwb" event={"ID":"a26573b3-3899-4b81-ba01-fabea24142cc","Type":"ContainerStarted","Data":"cc306db3bcb0f57290a10992d481154151a9d746e77938879bec3356a025af49"} Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.572382 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-wtlwb" event={"ID":"a26573b3-3899-4b81-ba01-fabea24142cc","Type":"ContainerStarted","Data":"ff4c5081be98e60bccff334ebc73afb7cc6d2df0597b198267fdd3b8264a6b23"} Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.573124 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/76d8c7b7-9f4b-4e54-9466-75bef179ce83-metrics-tls\") pod \"dns-default-n2cvg\" (UID: \"76d8c7b7-9f4b-4e54-9466-75bef179ce83\") " pod="openshift-dns/dns-default-n2cvg" Dec 02 11:53:34 crc 
kubenswrapper[4721]: I1202 11:53:34.573752 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/76d8c7b7-9f4b-4e54-9466-75bef179ce83-config-volume\") pod \"dns-default-n2cvg\" (UID: \"76d8c7b7-9f4b-4e54-9466-75bef179ce83\") " pod="openshift-dns/dns-default-n2cvg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.577050 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/77ff2b86-bf1c-4eda-abe0-dd8692cca140-node-bootstrap-token\") pod \"machine-config-server-vhp6r\" (UID: \"77ff2b86-bf1c-4eda-abe0-dd8692cca140\") " pod="openshift-machine-config-operator/machine-config-server-vhp6r" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.577272 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" event={"ID":"a275db4b-1993-4d42-b623-2bc621a59042","Type":"ContainerStarted","Data":"81473ac4282a9ff6f5e76af62d284915849c5166810f65942da5c6ef6549c1b4"} Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.577805 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/77ff2b86-bf1c-4eda-abe0-dd8692cca140-certs\") pod \"machine-config-server-vhp6r\" (UID: \"77ff2b86-bf1c-4eda-abe0-dd8692cca140\") " pod="openshift-machine-config-operator/machine-config-server-vhp6r" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.592106 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmtzj\" (UniqueName: \"kubernetes.io/projected/70910e18-bc62-4cb7-bc07-0dc3a8a157ce-kube-api-access-xmtzj\") pod \"apiserver-76f77b778f-q8vvn\" (UID: \"70910e18-bc62-4cb7-bc07-0dc3a8a157ce\") " pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.594299 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" event={"ID":"c02678af-0ebc-4689-ad5c-1300cb95439e","Type":"ContainerStarted","Data":"5a73a4f9def9655d1ab4c09791dc006bffee5dc63ca09b1e45747dfe726eee45"} Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.621181 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.622250 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995"] Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.622748 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wscdz\" (UniqueName: \"kubernetes.io/projected/70b6509b-bcfc-4ad7-8d97-ce279333b048-kube-api-access-wscdz\") pod \"kube-storage-version-migrator-operator-b67b599dd-v8wzq\" (UID: \"70b6509b-bcfc-4ad7-8d97-ce279333b048\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.626397 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xf8pm" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.629261 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" event={"ID":"33a6b5de-5ce1-4e23-b13d-feda684e25e0","Type":"ContainerStarted","Data":"0f35da0d07a3b8d6b5e7a3ecabbda5f483baecafab2dbac708d37b7daf06fff2"} Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.642212 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.642508 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-bound-sa-token\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.642624 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj" event={"ID":"aa569267-90f6-47ac-8cf0-467ba17c8e06","Type":"ContainerStarted","Data":"0b1d940f8a42df4a3c5771cde33841c764f7d126a09e8d2c36a98b23c1d6568e"} Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.642653 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj" event={"ID":"aa569267-90f6-47ac-8cf0-467ba17c8e06","Type":"ContainerStarted","Data":"e7c4b0e338173141b48b8f68df643aa63a093d787bf63f98d758bce547f8e5c5"} Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.643770 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-tl5t9"] Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.658318 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jstd8\" (UniqueName: \"kubernetes.io/projected/db4c5b64-6af8-4707-a667-b8f1e8435335-kube-api-access-jstd8\") pod \"olm-operator-6b444d44fb-rgmvn\" (UID: \"db4c5b64-6af8-4707-a667-b8f1e8435335\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.659208 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm"] Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.663557 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.668391 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:34 crc kubenswrapper[4721]: E1202 11:53:34.669467 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-02 11:53:35.169451678 +0000 UTC m=+144.908002027 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.679290 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl"] Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.684951 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-clflg\" (UID: \"9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.697437 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.705628 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.709536 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn"] Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.717424 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4twzf\" (UniqueName: \"kubernetes.io/projected/a8ef44c3-c7a8-4f21-b180-a6c3a9f7e292-kube-api-access-4twzf\") pod \"ingress-canary-mkdl7\" (UID: \"a8ef44c3-c7a8-4f21-b180-a6c3a9f7e292\") " pod="openshift-ingress-canary/ingress-canary-mkdl7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.717600 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.722032 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-mkdl7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.724168 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-rv854"] Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.733093 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czztn\" (UniqueName: \"kubernetes.io/projected/30469e3e-7560-4574-8657-06fe673ed079-kube-api-access-czztn\") pod \"machine-config-controller-84d6567774-8wrt4\" (UID: \"30469e3e-7560-4574-8657-06fe673ed079\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.752040 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-brktp"] Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.753077 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4slm\" (UniqueName: \"kubernetes.io/projected/b45dc373-92c5-48f1-9a77-309f0553079e-kube-api-access-b4slm\") pod \"machine-config-operator-74547568cd-ctcl6\" (UID: \"b45dc373-92c5-48f1-9a77-309f0553079e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.771991 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: E1202 11:53:34.772356 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:35.272316772 +0000 UTC m=+145.010867121 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.788192 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc7tv\" (UniqueName: \"kubernetes.io/projected/18b11741-408b-4270-abd3-b43fd24149ab-kube-api-access-vc7tv\") pod \"control-plane-machine-set-operator-78cbb6b69f-drfrd\" (UID: \"18b11741-408b-4270-abd3-b43fd24149ab\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-drfrd" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.789274 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sklvz\" (UniqueName: \"kubernetes.io/projected/fae5f727-ca2f-43b8-9ab9-b6817c537c19-kube-api-access-sklvz\") pod \"packageserver-d55dfcdfc-94q47\" (UID: \"fae5f727-ca2f-43b8-9ab9-b6817c537c19\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.792672 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-qx7nr"] Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.820126 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kcps\" (UniqueName: \"kubernetes.io/projected/5cd2ab71-ee95-4e70-a189-ef6152172de4-kube-api-access-6kcps\") pod \"service-ca-9c57cc56f-bl9q7\" (UID: \"5cd2ab71-ee95-4e70-a189-ef6152172de4\") " pod="openshift-service-ca/service-ca-9c57cc56f-bl9q7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.850113 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqm54\" (UniqueName: \"kubernetes.io/projected/df329555-e5bd-49b3-bce4-c995b4999755-kube-api-access-lqm54\") pod \"package-server-manager-789f6589d5-v6mzl\" (UID: \"df329555-e5bd-49b3-bce4-c995b4999755\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.873001 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:34 crc kubenswrapper[4721]: E1202 11:53:34.874999 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:35.374972129 +0000 UTC m=+145.113522478 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.883194 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpxrl\" (UniqueName: \"kubernetes.io/projected/2b5cafc4-348a-4e37-b2a1-4c51e5111d43-kube-api-access-mpxrl\") pod \"migrator-59844c95c7-66mxw\" (UID: \"2b5cafc4-348a-4e37-b2a1-4c51e5111d43\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66mxw" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.888278 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ts64\" (UniqueName: \"kubernetes.io/projected/bc7ef085-1e7b-4bcc-b00b-42deb769d466-kube-api-access-5ts64\") pod \"multus-admission-controller-857f4d67dd-n2z7b\" (UID: \"bc7ef085-1e7b-4bcc-b00b-42deb769d466\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-n2z7b" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.889479 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qdks\" (UniqueName: \"kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-kube-api-access-5qdks\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.895679 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.916153 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.925026 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2063cb21-914f-4d0c-a6a7-e8edee1a4f9a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-dwqdt\" (UID: \"2063cb21-914f-4d0c-a6a7-e8edee1a4f9a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.932163 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-drfrd" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.943552 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e528e899-db16-4e75-a55f-4eaa2462b90d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-rb6cf\" (UID: \"e528e899-db16-4e75-a55f-4eaa2462b90d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.944487 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66mxw" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.951918 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.957857 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-n2z7b" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.958990 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpwqh\" (UniqueName: \"kubernetes.io/projected/a22a1d4a-00c0-4109-8eff-3492590aeb1a-kube-api-access-kpwqh\") pod \"catalog-operator-68c6474976-857kt\" (UID: \"a22a1d4a-00c0-4109-8eff-3492590aeb1a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.967490 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7fnp\" (UniqueName: \"kubernetes.io/projected/e7980e22-75e8-4495-afb8-f486065789c4-kube-api-access-t7fnp\") pod \"collect-profiles-29411265-f5k6f\" (UID: \"e7980e22-75e8-4495-afb8-f486065789c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.971784 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.976552 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:34 crc kubenswrapper[4721]: E1202 11:53:34.976984 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:35.476967255 +0000 UTC m=+145.215517604 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.977758 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-bl9q7" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.982814 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.983543 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.986937 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 11:53:34 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Dec 02 11:53:34 crc kubenswrapper[4721]: [+]process-running ok Dec 02 11:53:34 crc kubenswrapper[4721]: healthz check failed Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.986975 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.987440 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwj8s\" (UniqueName: \"kubernetes.io/projected/9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9-kube-api-access-hwj8s\") pod \"ingress-operator-5b745b69d9-clflg\" (UID: \"9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" Dec 02 11:53:34 crc kubenswrapper[4721]: I1202 11:53:34.992959 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.003784 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrvb5\" (UniqueName: \"kubernetes.io/projected/dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9-kube-api-access-hrvb5\") pod \"csi-hostpathplugin-pcp6j\" (UID: \"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9\") " pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.014990 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt" Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.025511 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n88c8\" (UniqueName: \"kubernetes.io/projected/76d8c7b7-9f4b-4e54-9466-75bef179ce83-kube-api-access-n88c8\") pod \"dns-default-n2cvg\" (UID: \"76d8c7b7-9f4b-4e54-9466-75bef179ce83\") " pod="openshift-dns/dns-default-n2cvg" Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.049467 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.051409 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmnnc\" (UniqueName: \"kubernetes.io/projected/77ff2b86-bf1c-4eda-abe0-dd8692cca140-kube-api-access-gmnnc\") pod \"machine-config-server-vhp6r\" (UID: \"77ff2b86-bf1c-4eda-abe0-dd8692cca140\") " pod="openshift-machine-config-operator/machine-config-server-vhp6r" Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.053536 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-n2cvg" Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.060796 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s"] Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.077383 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:35 crc kubenswrapper[4721]: E1202 11:53:35.077819 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:35.577801924 +0000 UTC m=+145.316352263 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.179301 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:35 crc kubenswrapper[4721]: E1202 11:53:35.191847 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:35.691825842 +0000 UTC m=+145.430376191 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.204650 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf" Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.218412 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.281665 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-mkdl7"] Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.293281 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:35 crc kubenswrapper[4721]: E1202 11:53:35.293645 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:35.793629491 +0000 UTC m=+145.532179840 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.330805 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-vhp6r" Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.394495 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:35 crc kubenswrapper[4721]: E1202 11:53:35.395749 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:35.895723991 +0000 UTC m=+145.634274370 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.403215 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gtb8k"] Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.408622 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wb9r7"] Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.413113 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xf8pm"] Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.501679 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:35 crc kubenswrapper[4721]: E1202 11:53:35.502228 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:36.002212009 +0000 UTC m=+145.740762358 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.607564 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:35 crc kubenswrapper[4721]: E1202 11:53:35.608026 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:36.108013865 +0000 UTC m=+145.846564214 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.638700 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq"] Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.651384 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-q8vvn"] Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.687781 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh"] Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.688094 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn"] Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.698309 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4"] Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.700812 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-qx7nr" event={"ID":"78c789c6-4ac0-4a7b-8cb6-b072909ccf03","Type":"ContainerStarted","Data":"1631493b23b07aabe07a0abdc822bac9bf6ce0c4d725a99b19315a191b161fa3"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.700846 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-qx7nr" event={"ID":"78c789c6-4ac0-4a7b-8cb6-b072909ccf03","Type":"ContainerStarted","Data":"8b0250f54efd9529a71b974c572ac3def189db9c1665f908c2204df6e66cc6ee"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.701325 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-qx7nr" Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.706464 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" event={"ID":"0f706843-68b8-4598-9482-9c1361f60a63","Type":"ContainerStarted","Data":"7bb576ce88e512360e42b6163cd818688392d43df5f38a4564657a9cfcde610e"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.716152 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:35 crc kubenswrapper[4721]: E1202 11:53:35.716921 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:36.216899449 +0000 UTC m=+145.955449798 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.719997 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995" event={"ID":"bc778281-fc4f-4592-a805-e03aac7643b1","Type":"ContainerStarted","Data":"4515fadf3654ed25c971ca8ac6f79ca48cf65ec00203378e9c69bdb54e69a849"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.720034 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995" event={"ID":"bc778281-fc4f-4592-a805-e03aac7643b1","Type":"ContainerStarted","Data":"fbf01d96c26622e92e0e0738b03363acb265c9f8f36cb43ab5e085d3ca1e8063"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.728882 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" event={"ID":"a275db4b-1993-4d42-b623-2bc621a59042","Type":"ContainerStarted","Data":"5d59eefb343fe989d8e0ab180fc6fb9d1f913d144ebbca0485313650d41f1383"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.731617 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-brktp" event={"ID":"213e4819-aa67-42ad-954c-5a461b370407","Type":"ContainerStarted","Data":"32b452cff28668cde7cd57d84a2134f5bb69d58d6ad02f2fae36635788085852"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.744062 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-tl5t9" event={"ID":"54beb89c-d2c3-42c8-a186-465ebcbb4e4b","Type":"ContainerStarted","Data":"cc75ae1159d7afc879ee31f343f27b664a67d0c8a26b3a98e093bffb45575bf7"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.744100 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-tl5t9" event={"ID":"54beb89c-d2c3-42c8-a186-465ebcbb4e4b","Type":"ContainerStarted","Data":"bedeeba38aedbf28098686e08864da599226517a093b310907169f564f7150e9"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.746912 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-mkdl7" event={"ID":"a8ef44c3-c7a8-4f21-b180-a6c3a9f7e292","Type":"ContainerStarted","Data":"ed4d440b7062e277da1787242287a0c08b9b5b30abb9e1a4144dcc4c0e98d165"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.777327 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" event={"ID":"c02678af-0ebc-4689-ad5c-1300cb95439e","Type":"ContainerStarted","Data":"6ef1462510924be5e6b16aac7a5cea0c7ec789fdc3cac653d29440f9b28ce12e"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.777786 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.780201 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" 
event={"ID":"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7","Type":"ContainerStarted","Data":"55249d29ce7bb50c0d27a31f902d2db842411af786e1d061bbf888a22f19e6b8"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.785130 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" event={"ID":"1f4e7fae-d9a4-4f20-8658-4544860eaced","Type":"ContainerStarted","Data":"be86e2e22779a32b69dd75add2f84c7d7f50c0005c3c4fdfaeef37b9a1518daa"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.785170 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" event={"ID":"1f4e7fae-d9a4-4f20-8658-4544860eaced","Type":"ContainerStarted","Data":"9b98fac2cee8c9550eacc6af151eb41f881d0d750ac0ebf9f30b82e55e6bbbdf"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.785316 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.790501 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" event={"ID":"ac4ba204-8204-441b-9e16-5ec66a277675","Type":"ContainerStarted","Data":"0936b3ceb95d7a8732a4647fcf0959acef5e644ada8658ae52a6776a4564a23b"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.794913 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" event={"ID":"b464a77f-4bf7-4719-98c2-d07e5bde858a","Type":"ContainerStarted","Data":"3b2e2ec8c4593b17e8140f8c1d3db53bf4a9d73b823b119e95651c904649d69e"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.794947 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" event={"ID":"b464a77f-4bf7-4719-98c2-d07e5bde858a","Type":"ContainerStarted","Data":"aecc5eae4d89b4572f077c8f6dd6f1d6a4b60feef36385f18f5175e45f1f5d31"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.800702 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" event={"ID":"5754f3db-279a-4340-9354-8709b29e0398","Type":"ContainerStarted","Data":"49bd301eff21eb1089700a1612d7e21a5e2bbcb754b662ade99b5440d8bdf391"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.803272 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh" event={"ID":"995ba10c-c212-4afd-a95e-a03cbac4d08e","Type":"ContainerStarted","Data":"000a9aa56eec2357621e23b649f4a645728dd0479b82d33232b6a62be999d1fa"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.807246 4721 generic.go:334] "Generic (PLEG): container finished" podID="0863475f-c2fe-4b3c-abca-eaecb1e26561" containerID="f4245bfec2e0c0749ca12ecbbcfb6ef217f4d9d960a8c6c11b95aa93c86a76c4" exitCode=0 Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.807324 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" event={"ID":"0863475f-c2fe-4b3c-abca-eaecb1e26561","Type":"ContainerDied","Data":"f4245bfec2e0c0749ca12ecbbcfb6ef217f4d9d960a8c6c11b95aa93c86a76c4"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.807365 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" event={"ID":"0863475f-c2fe-4b3c-abca-eaecb1e26561","Type":"ContainerStarted","Data":"59d283f44a74047d64f1780ab708f6e7588f2d358677a8f024a7e0654a082942"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.813312 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-rv854" event={"ID":"7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99","Type":"ContainerStarted","Data":"ae7d1ec7fdb82bf98c1942eff3059cb8309cd0fd78fcd84d72bc34bea71280f9"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.813503 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-rv854" event={"ID":"7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99","Type":"ContainerStarted","Data":"5d2baaa163bc9ed4cedadb23316118410fb4adcd634981c4023d61756badbe42"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.813678 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-rv854" Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.841190 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:35 crc kubenswrapper[4721]: E1202 11:53:35.848876 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:36.348857813 +0000 UTC m=+146.087408162 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.867464 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" event={"ID":"984ee29c-596a-48e8-80a4-cf8fda878e7f","Type":"ContainerStarted","Data":"9485bd5cfb1648814a6e609b1d5555c17b2e01781adb8a0a4d5ab3f3e11ac20c"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.867606 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" event={"ID":"984ee29c-596a-48e8-80a4-cf8fda878e7f","Type":"ContainerStarted","Data":"c419231af96fd55e4960a1465f37e86e7e0acad5bb8708d0b57dcb8b604a9e24"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.870437 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" event={"ID":"33a6b5de-5ce1-4e23-b13d-feda684e25e0","Type":"ContainerStarted","Data":"1e2c69df2c562f2e59b3ff5350f2563208b2c3ee34b79521c3731ae4848a1088"} Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.921511 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xwtjj" podStartSLOduration=124.921488451 podStartE2EDuration="2m4.921488451s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:35.889475648 +0000 UTC m=+145.628026007" watchObservedRunningTime="2025-12-02 11:53:35.921488451 +0000 UTC m=+145.660038800" Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.921837 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl"] Dec 02 11:53:35 crc kubenswrapper[4721]: I1202 11:53:35.942653 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:35 crc kubenswrapper[4721]: E1202 11:53:35.943842 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:36.443826117 +0000 UTC m=+146.182376466 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.053964 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.060170 4721 patch_prober.go:28] interesting pod/console-operator-58897d9998-rv854 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.060225 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-qx7nr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.060275 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qx7nr" podUID="78c789c6-4ac0-4a7b-8cb6-b072909ccf03" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.060223 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-rv854" podUID="7d94bdd0-bfa5-4e54-a39a-8d8f64e10f99" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.061112 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:36 crc kubenswrapper[4721]: E1202 11:53:36.061508 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:36.561497339 +0000 UTC m=+146.300047688 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.067604 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 11:53:36 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Dec 02 11:53:36 crc kubenswrapper[4721]: [+]process-running ok Dec 02 11:53:36 crc kubenswrapper[4721]: healthz check failed Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.067649 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.161956 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:36 crc kubenswrapper[4721]: E1202 11:53:36.162244 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:36.662225035 +0000 UTC m=+146.400775384 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.170177 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-wtlwb" podStartSLOduration=125.170160636 podStartE2EDuration="2m5.170160636s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:36.170056333 +0000 UTC m=+145.908606682" watchObservedRunningTime="2025-12-02 11:53:36.170160636 +0000 UTC m=+145.908710985" Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.268771 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:36 crc kubenswrapper[4721]: E1202 11:53:36.269469 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:36.769457867 +0000 UTC m=+146.508008216 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.373375 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:36 crc kubenswrapper[4721]: E1202 11:53:36.373789 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:36.873770816 +0000 UTC m=+146.612321165 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.474461 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:36 crc kubenswrapper[4721]: E1202 11:53:36.475113 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:36.975098131 +0000 UTC m=+146.713648480 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.509400 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47"] Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.532426 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-n2z7b"] Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.586565 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:36 crc kubenswrapper[4721]: E1202 11:53:36.586943 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:37.086928968 +0000 UTC m=+146.825479317 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.622568 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-rv854" podStartSLOduration=125.622550736 podStartE2EDuration="2m5.622550736s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:36.622551416 +0000 UTC m=+146.361101765" watchObservedRunningTime="2025-12-02 11:53:36.622550736 +0000 UTC m=+146.361101085" Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.649206 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-drfrd"] Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.661735 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6"] Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.664844 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-qx7nr" podStartSLOduration=125.663854252 podStartE2EDuration="2m5.663854252s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:36.660718252 +0000 UTC m=+146.399268601" watchObservedRunningTime="2025-12-02 11:53:36.663854252 +0000 UTC m=+146.402404601" Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.672704 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.687942 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:36 crc kubenswrapper[4721]: E1202 11:53:36.688369 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:37.188328656 +0000 UTC m=+146.926879005 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:36 crc kubenswrapper[4721]: W1202 11:53:36.698864 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb45dc373_92c5_48f1_9a77_309f0553079e.slice/crio-eeee313b33ba7cdd66be2349242b85bddf59e91b61f3e8d4b31714f2e236cc3f WatchSource:0}: Error finding container eeee313b33ba7cdd66be2349242b85bddf59e91b61f3e8d4b31714f2e236cc3f: Status 404 returned error can't find the container with id eeee313b33ba7cdd66be2349242b85bddf59e91b61f3e8d4b31714f2e236cc3f Dec 02 11:53:36 crc kubenswrapper[4721]: W1202 11:53:36.701604 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfae5f727_ca2f_43b8_9ab9_b6817c537c19.slice/crio-5dae1e471cd83f91a5ff08b1a20d95dcc08d94daf56a9b33c77343eef990a2f5 WatchSource:0}: Error finding container 5dae1e471cd83f91a5ff08b1a20d95dcc08d94daf56a9b33c77343eef990a2f5: Status 404 returned error can't find the container with id 5dae1e471cd83f91a5ff08b1a20d95dcc08d94daf56a9b33c77343eef990a2f5 Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.752982 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-5cfgp" podStartSLOduration=125.7529659 podStartE2EDuration="2m5.7529659s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:36.702123182 +0000 UTC m=+146.440673531" watchObservedRunningTime="2025-12-02 11:53:36.7529659 +0000 UTC m=+146.491516249" Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.753064 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v2gnh" podStartSLOduration=125.753060753 podStartE2EDuration="2m5.753060753s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:36.742042595 +0000 UTC m=+146.480592954" watchObservedRunningTime="2025-12-02 11:53:36.753060753 +0000 UTC m=+146.491611102" Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.778928 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-66mxw"] Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.790621 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:36 crc kubenswrapper[4721]: E1202 11:53:36.790943 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:37.290930211 +0000 UTC m=+147.029480550 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.804912 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-vhhcx" podStartSLOduration=126.804889282 podStartE2EDuration="2m6.804889282s" podCreationTimestamp="2025-12-02 11:51:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:36.772497408 +0000 UTC m=+146.511047757" watchObservedRunningTime="2025-12-02 11:53:36.804889282 +0000 UTC m=+146.543439631"
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.805719 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt"]
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.895101 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s"
Dec 02 11:53:36 crc kubenswrapper[4721]: E1202 11:53:36.895626 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:37.395614812 +0000 UTC m=+147.134165161 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.906916 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" event={"ID":"db4c5b64-6af8-4707-a667-b8f1e8435335","Type":"ContainerStarted","Data":"882120732e0fb09d1dae1b0411e417f9778f5f97c759a9ff1dcea908f36c09d2"}
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.906953 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" event={"ID":"db4c5b64-6af8-4707-a667-b8f1e8435335","Type":"ContainerStarted","Data":"cc5db25f935d2d699d0451f959f7229d4b5c1e896c63f6f481a01347f113f8c1"}
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.908619 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn"
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.924135 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" event={"ID":"fae5f727-ca2f-43b8-9ab9-b6817c537c19","Type":"ContainerStarted","Data":"5dae1e471cd83f91a5ff08b1a20d95dcc08d94daf56a9b33c77343eef990a2f5"}
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.926937 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xf8pm" event={"ID":"71be1dc6-76a8-4155-9900-1553a730a64a","Type":"ContainerStarted","Data":"3c820501729220dee335b7b8a61dce2732c5a9c92c3c8997c6a6ec3d13dcae20"}
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.926965 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xf8pm" event={"ID":"71be1dc6-76a8-4155-9900-1553a730a64a","Type":"ContainerStarted","Data":"73542d34fd1a48c7a6b1ee83f90fbcdca72da02376f1fb951626cd3484d35377"}
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.927940 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" event={"ID":"5754f3db-279a-4340-9354-8709b29e0398","Type":"ContainerStarted","Data":"b137ec3317f50b7b548136e99fcada74e65bb27ef32b783ecb48d77844237afa"}
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.928711 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k"
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.928779 4721 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-rgmvn container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body=
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.928805 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" podUID="db4c5b64-6af8-4707-a667-b8f1e8435335" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused"
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.941328 4721 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-gtb8k container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body=
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.941411 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" podUID="5754f3db-279a-4340-9354-8709b29e0398" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused"
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.955632 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" event={"ID":"70910e18-bc62-4cb7-bc07-0dc3a8a157ce","Type":"ContainerStarted","Data":"6bd86470c68a8ed2da1d300269dbb76fb34b14786552c7205d2777b54e5358f1"}
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.959706 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-tl5t9" podStartSLOduration=125.959688379 podStartE2EDuration="2m5.959688379s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:36.833395234 +0000 UTC m=+146.571945583" watchObservedRunningTime="2025-12-02 11:53:36.959688379 +0000 UTC m=+146.698238738"
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.960110 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" event={"ID":"a275db4b-1993-4d42-b623-2bc621a59042","Type":"ContainerStarted","Data":"79e4eb13561aa079d425823059aa90c0da5c166e47fb52f39df503706816cb40"}
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.961429 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f"]
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.988635 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66mxw" event={"ID":"2b5cafc4-348a-4e37-b2a1-4c51e5111d43","Type":"ContainerStarted","Data":"3bad09358a6328692b6d17c3a5d6f3787fbb40358c08412a7106499a31aadd8e"}
Dec 02 11:53:36 crc kubenswrapper[4721]: I1202 11:53:36.996229 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 11:53:36 crc kubenswrapper[4721]: E1202 11:53:36.997294 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:37.497264428 +0000 UTC m=+147.235814777 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.010267 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 11:53:37 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Dec 02 11:53:37 crc kubenswrapper[4721]: [+]process-running ok
Dec 02 11:53:37 crc kubenswrapper[4721]: healthz check failed
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.010319 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.011056 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh" event={"ID":"36ba50bc-f888-4f82-af4c-9879d8089273","Type":"ContainerStarted","Data":"8630e9e02cbff8e4a871a69b857ba9bbdaf3f2a17fc1cd7920b99a01d6746c15"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.028106 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh" event={"ID":"36ba50bc-f888-4f82-af4c-9879d8089273","Type":"ContainerStarted","Data":"873a7f95957c5a1a2d98820429e152f4efea7908b1b25d67b87312c3dc0ec6b4"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.071776 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-mkdl7" event={"ID":"a8ef44c3-c7a8-4f21-b180-a6c3a9f7e292","Type":"ContainerStarted","Data":"b4281cd714e70ee1611934acd9b7eb7ac18055f11a92debaeebfbd4d9045232b"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.085927 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-pcp6j"]
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.093096 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" podStartSLOduration=126.093075998 podStartE2EDuration="2m6.093075998s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:37.080966605 +0000 UTC m=+146.819516964" watchObservedRunningTime="2025-12-02 11:53:37.093075998 +0000 UTC m=+146.831626347"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.100998 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-bl9q7"]
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.111078 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s"
Dec 02 11:53:37 crc kubenswrapper[4721]: E1202 11:53:37.112879 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:37.612845793 +0000 UTC m=+147.351396222 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.133586 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl" event={"ID":"df329555-e5bd-49b3-bce4-c995b4999755","Type":"ContainerStarted","Data":"afa3c056cd4135d14db60de0a7c9874f5d093503f1f9dad20ed753df6220720d"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.168759 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq" event={"ID":"70b6509b-bcfc-4ad7-8d97-ce279333b048","Type":"ContainerStarted","Data":"39fda4ef4a8b63a4b27e699543e6721461a1127a2403fcd6087bd4a01341b817"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.168797 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq" event={"ID":"70b6509b-bcfc-4ad7-8d97-ce279333b048","Type":"ContainerStarted","Data":"3284b8f1f2826ea4641b1d49971f3fa5bd552aacf57f036245efc5237dc60277"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.171951 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" podStartSLOduration=126.171928882 podStartE2EDuration="2m6.171928882s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:37.168981838 +0000 UTC m=+146.907532187" watchObservedRunningTime="2025-12-02 11:53:37.171928882 +0000 UTC m=+146.910479231"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.174946 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt"]
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.191448 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-n2cvg"]
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.196332 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-clflg"]
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.213226 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 11:53:37 crc kubenswrapper[4721]: E1202 11:53:37.214286 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:37.714270191 +0000 UTC m=+147.452820540 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.229039 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" event={"ID":"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7","Type":"ContainerStarted","Data":"16c8b209eaf4383bf800c7d2c1687b4a3ebc6e04e36cfbedd9b318f510388e09"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.229820 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.230215 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tgqm" podStartSLOduration=126.230205486 podStartE2EDuration="2m6.230205486s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:37.204294106 +0000 UTC m=+146.942844455" watchObservedRunningTime="2025-12-02 11:53:37.230205486 +0000 UTC m=+146.968755835"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.241727 4721 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-wb9r7 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.33:6443/healthz\": dial tcp 10.217.0.33:6443: connect: connection refused" start-of-body=
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.241795 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" podUID="9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.33:6443/healthz\": dial tcp 10.217.0.33:6443: connect: connection refused"
Dec 02 11:53:37 crc kubenswrapper[4721]: W1202 11:53:37.245188 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda22a1d4a_00c0_4109_8eff_3492590aeb1a.slice/crio-bd0ba4bcf928463aa9c654c89b7d336e9c6d92d4eb8a4a23fc305faf6e41b6c1 WatchSource:0}: Error finding container bd0ba4bcf928463aa9c654c89b7d336e9c6d92d4eb8a4a23fc305faf6e41b6c1: Status 404 returned error can't find the container with id bd0ba4bcf928463aa9c654c89b7d336e9c6d92d4eb8a4a23fc305faf6e41b6c1
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.280840 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-zjwgh" podStartSLOduration=126.280810565 podStartE2EDuration="2m6.280810565s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:37.260725081 +0000 UTC m=+146.999275430" watchObservedRunningTime="2025-12-02 11:53:37.280810565 +0000 UTC m=+147.019360914"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.282295 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-57995" podStartSLOduration=127.282287203 podStartE2EDuration="2m7.282287203s" podCreationTimestamp="2025-12-02 11:51:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:37.281181507 +0000 UTC m=+147.019731856" watchObservedRunningTime="2025-12-02 11:53:37.282287203 +0000 UTC m=+147.020837552"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.286271 4721 generic.go:334] "Generic (PLEG): container finished" podID="0f706843-68b8-4598-9482-9c1361f60a63" containerID="6718f17b83767dc59d09f9770120084b26f743c3776c142c7302d3540a6ebc13" exitCode=0
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.286367 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" event={"ID":"0f706843-68b8-4598-9482-9c1361f60a63","Type":"ContainerDied","Data":"6718f17b83767dc59d09f9770120084b26f743c3776c142c7302d3540a6ebc13"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.312461 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pf5mn" podStartSLOduration=129.312444376 podStartE2EDuration="2m9.312444376s" podCreationTimestamp="2025-12-02 11:51:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:37.307095307 +0000 UTC m=+147.045645656" watchObservedRunningTime="2025-12-02 11:53:37.312444376 +0000 UTC m=+147.050994725"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.312945 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf"]
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.316560 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.317289 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-n2z7b" event={"ID":"bc7ef085-1e7b-4bcc-b00b-42deb769d466","Type":"ContainerStarted","Data":"73e41678498b4d314645372d5ebd369dd03c31e5d242cf429b82ae08c60927df"}
Dec 02 11:53:37 crc kubenswrapper[4721]: E1202 11:53:37.317895 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:37.817881418 +0000 UTC m=+147.556431767 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:37 crc kubenswrapper[4721]: W1202 11:53:37.338814 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode528e899_db16_4e75_a55f_4eaa2462b90d.slice/crio-a698dc1257dacf53d798bae2b757edc8fbc7eb58b28a6a1878a54ae1e37c790f WatchSource:0}: Error finding container a698dc1257dacf53d798bae2b757edc8fbc7eb58b28a6a1878a54ae1e37c790f: Status 404 returned error can't find the container with id a698dc1257dacf53d798bae2b757edc8fbc7eb58b28a6a1878a54ae1e37c790f
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.361521 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4" event={"ID":"30469e3e-7560-4574-8657-06fe673ed079","Type":"ContainerStarted","Data":"c1db4415e706f9115d915aaa8dd50741596b1684a9faf9c9add8146b8d46a7ed"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.395038 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-vhp6r" event={"ID":"77ff2b86-bf1c-4eda-abe0-dd8692cca140","Type":"ContainerStarted","Data":"e8f32f1abb8abcebb3ac96ecad7890c49ac2a35f265dba2bf325e99891cd18f0"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.395373 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-vhp6r" event={"ID":"77ff2b86-bf1c-4eda-abe0-dd8692cca140","Type":"ContainerStarted","Data":"2218a2e7aef1fe65cb3195111cda60ae5b0bd3089c297bdf71d67252b81842ce"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.408961 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" event={"ID":"b45dc373-92c5-48f1-9a77-309f0553079e","Type":"ContainerStarted","Data":"eeee313b33ba7cdd66be2349242b85bddf59e91b61f3e8d4b31714f2e236cc3f"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.413929 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-drfrd" event={"ID":"18b11741-408b-4270-abd3-b43fd24149ab","Type":"ContainerStarted","Data":"d53fa01dda51559836eb6fb2cda451ca71572d41a661fc3992eebb86194fc24a"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.416884 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-x7gjh" podStartSLOduration=126.416864059 podStartE2EDuration="2m6.416864059s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:37.416452256 +0000 UTC m=+147.155002605" watchObservedRunningTime="2025-12-02 11:53:37.416864059 +0000 UTC m=+147.155414408"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.417178 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.418049 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v8wzq" podStartSLOduration=126.418044126 podStartE2EDuration="2m6.418044126s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:37.370835253 +0000 UTC m=+147.109385602" watchObservedRunningTime="2025-12-02 11:53:37.418044126 +0000 UTC m=+147.156594475"
Dec 02 11:53:37 crc kubenswrapper[4721]: E1202 11:53:37.428175 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:37.928155747 +0000 UTC m=+147.666706086 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.460899 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-brktp" event={"ID":"213e4819-aa67-42ad-954c-5a461b370407","Type":"ContainerStarted","Data":"b4b34a6481e257cad957fe3e86729ee5c8b936f0058ddd909aa23c3db15156e9"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.460939 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-brktp" event={"ID":"213e4819-aa67-42ad-954c-5a461b370407","Type":"ContainerStarted","Data":"f597b416c48e2cdfd041246131b6d1dabf1ccc7820e96da460600f486780662d"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.473619 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" podStartSLOduration=126.473603814 podStartE2EDuration="2m6.473603814s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:37.473326396 +0000 UTC m=+147.211876745" watchObservedRunningTime="2025-12-02 11:53:37.473603814 +0000 UTC m=+147.212154163"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.476387 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-mkdl7" podStartSLOduration=6.476373722 podStartE2EDuration="6.476373722s" podCreationTimestamp="2025-12-02 11:53:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:37.445711102 +0000 UTC m=+147.184261451" watchObservedRunningTime="2025-12-02 11:53:37.476373722 +0000 UTC m=+147.214924071"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.518729 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s"
Dec 02 11:53:37 crc kubenswrapper[4721]: E1202 11:53:37.520731 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:38.020718344 +0000 UTC m=+147.759268693 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.524788 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" podStartSLOduration=127.524766572 podStartE2EDuration="2m7.524766572s" podCreationTimestamp="2025-12-02 11:51:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:37.522314515 +0000 UTC m=+147.260864864" watchObservedRunningTime="2025-12-02 11:53:37.524766572 +0000 UTC m=+147.263316921"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.525312 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" podStartSLOduration=126.525307359 podStartE2EDuration="2m6.525307359s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:37.494861346 +0000 UTC m=+147.233411695" watchObservedRunningTime="2025-12-02 11:53:37.525307359 +0000 UTC m=+147.263857708"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.554949 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" event={"ID":"0863475f-c2fe-4b3c-abca-eaecb1e26561","Type":"ContainerStarted","Data":"cf7024f6fa6a5548ec98d6db15bb187c8b472b386bca6ce04eabeb4e164a6602"}
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.555148 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-qx7nr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.555182 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qx7nr" podUID="78c789c6-4ac0-4a7b-8cb6-b072909ccf03" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.555938 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-vhp6r" podStartSLOduration=6.555921618 podStartE2EDuration="6.555921618s" podCreationTimestamp="2025-12-02 11:53:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:37.553745799 +0000 UTC m=+147.292296148" watchObservedRunningTime="2025-12-02 11:53:37.555921618 +0000 UTC m=+147.294471967"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.556260 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.658117 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 11:53:37 crc kubenswrapper[4721]: E1202 11:53:37.659678 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:38.159661979 +0000 UTC m=+147.898212328 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.695709 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-brktp" podStartSLOduration=126.695691868 podStartE2EDuration="2m6.695691868s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:37.659457672 +0000 UTC m=+147.398008011" watchObservedRunningTime="2025-12-02 11:53:37.695691868 +0000 UTC m=+147.434242217"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.759798 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.760017 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.760066 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.760228 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.760263 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.780636 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:37 crc kubenswrapper[4721]: E1202 11:53:37.781411 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:38.281146022 +0000 UTC m=+148.019696371 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.786060 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.789316 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.803935 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.868177 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 11:53:37 crc kubenswrapper[4721]: E1202 11:53:37.868277 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:38.368252027 +0000 UTC m=+148.106802386 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.868719 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s"
Dec 02 11:53:37 crc kubenswrapper[4721]: E1202 11:53:37.869050 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:38.369033931 +0000 UTC m=+148.107584280 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.970421 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 11:53:37 crc kubenswrapper[4721]: E1202 11:53:37.970822 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:38.470806151 +0000 UTC m=+148.209356500 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.986117 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 11:53:37 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Dec 02 11:53:37 crc kubenswrapper[4721]: [+]process-running ok
Dec 02 11:53:37 crc kubenswrapper[4721]: healthz check failed
Dec 02 11:53:37 crc kubenswrapper[4721]: I1202 11:53:37.986167 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.053275 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.063468 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.069561 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.071911 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s"
Dec 02 11:53:38 crc kubenswrapper[4721]: E1202 11:53:38.072567 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:38.572555659 +0000 UTC m=+148.311106008 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.174643 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 11:53:38 crc kubenswrapper[4721]: E1202 11:53:38.174799 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:38.674777852 +0000 UTC m=+148.413328201 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.174869 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s"
Dec 02 11:53:38 crc kubenswrapper[4721]: E1202 11:53:38.175136 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:38.675125403 +0000 UTC m=+148.413675752 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.190999 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-rv854"
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.246605 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" podStartSLOduration=127.246589133 podStartE2EDuration="2m7.246589133s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:37.696090921 +0000 UTC m=+147.434641270" watchObservedRunningTime="2025-12-02 11:53:38.246589133 +0000 UTC m=+147.985139482"
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.280812 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 11:53:38 crc kubenswrapper[4721]: E1202 11:53:38.280950 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:38.78092947 +0000 UTC m=+148.519479819 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.281303 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s"
Dec 02 11:53:38 crc kubenswrapper[4721]: E1202 11:53:38.281598 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:38.781585701 +0000 UTC m=+148.520136050 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.382855 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 11:53:38 crc kubenswrapper[4721]: E1202 11:53:38.383189 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:38.883171454 +0000 UTC m=+148.621721803 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.484314 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s"
Dec 02 11:53:38 crc kubenswrapper[4721]: E1202 11:53:38.484842 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:38.984826949 +0000 UTC m=+148.723377298 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.594266 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 11:53:38 crc kubenswrapper[4721]: E1202 11:53:38.594831 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:39.094805498 +0000 UTC m=+148.833355847 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.617342 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xf8pm" event={"ID":"71be1dc6-76a8-4155-9900-1553a730a64a","Type":"ContainerStarted","Data":"9bee6284b155f77d06c7b449ff601d6779a70a26d2c818f4b30497529eec9eba"}
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.654283 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt" event={"ID":"2063cb21-914f-4d0c-a6a7-e8edee1a4f9a","Type":"ContainerStarted","Data":"6fc49cd8cae82a1668a36bdec87730e4712ed32066d7ba822c7d04aa1d63605d"}
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.654328 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt" event={"ID":"2063cb21-914f-4d0c-a6a7-e8edee1a4f9a","Type":"ContainerStarted","Data":"53555fd9e9a1cbbea778f9bd742ed2d8eb45aed0c52abb4090dea7745cd6b061"}
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.655330 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-xf8pm" podStartSLOduration=128.65529689 podStartE2EDuration="2m8.65529689s" podCreationTimestamp="2025-12-02 11:51:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:38.65399748 +0000 UTC m=+148.392547829" watchObservedRunningTime="2025-12-02 11:53:38.65529689 +0000 UTC m=+148.393847239"
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.685619 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl" event={"ID":"df329555-e5bd-49b3-bce4-c995b4999755","Type":"ContainerStarted","Data":"a438c284600ea49431de0f75ee6c5eaf175642eb8be334fc55d28ddbaacf5820"}
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.685665 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl" event={"ID":"df329555-e5bd-49b3-bce4-c995b4999755","Type":"ContainerStarted","Data":"3fe3f2ec006e93b1cf91806ee4a1422bbc0638a269fe861b7968ae63b78c158d"}
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.685847 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl"
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.696547 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s"
Dec 02 11:53:38 crc kubenswrapper[4721]: E1202 11:53:38.697025 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:39.197012431 +0000 UTC m=+148.935562780 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.706836 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" event={"ID":"e7980e22-75e8-4495-afb8-f486065789c4","Type":"ContainerStarted","Data":"0ea38412c49ec404101d03174feee3413ce3857d963fb948b8eb659016ba75c3"}
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.706879 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" event={"ID":"e7980e22-75e8-4495-afb8-f486065789c4","Type":"ContainerStarted","Data":"7343c0e911321540c672be7c133b60e3d14ae0fb6e7f9fba093ec3b73ae57e78"}
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.712025 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-dwqdt" podStartSLOduration=127.712011605 podStartE2EDuration="2m7.712011605s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:38.679104274 +0000 UTC m=+148.417654623" watchObservedRunningTime="2025-12-02 11:53:38.712011605 +0000 UTC m=+148.450561944"
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.761193 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" event={"ID":"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9","Type":"ContainerStarted","Data":"35484e3c2e6ed0be2c05d526ba90ce34a0807fa08072e9be471d5fa2641d17d9"}
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.762394 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl" podStartSLOduration=127.762374368 podStartE2EDuration="2m7.762374368s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:38.716032972 +0000 UTC m=+148.454583321" watchObservedRunningTime="2025-12-02 11:53:38.762374368 +0000 UTC m=+148.500924737"
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.800862 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-drfrd" event={"ID":"18b11741-408b-4270-abd3-b43fd24149ab","Type":"ContainerStarted","Data":"851f60c0ff557d83e2be23ec6fca9bcd1c83e4ec5c4ffecd90683bb8d9ec6121"}
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.801380 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 11:53:38 crc kubenswrapper[4721]: E1202 11:53:38.802534 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:39.302516398 +0000 UTC m=+149.041066747 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.821099 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" event={"ID":"0f706843-68b8-4598-9482-9c1361f60a63","Type":"ContainerStarted","Data":"0df374b9804dc495e18c8ba3b9a059e0d1780cf61c5ccfeb7eb2e9bc9092e193"}
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.834496 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" podStartSLOduration=128.834475148 podStartE2EDuration="2m8.834475148s" podCreationTimestamp="2025-12-02 11:51:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:38.759899269 +0000 UTC m=+148.498449618" watchObservedRunningTime="2025-12-02 11:53:38.834475148 +0000 UTC m=+148.573025497"
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.845416 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-drfrd" podStartSLOduration=127.845394314 podStartE2EDuration="2m7.845394314s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:38.822963855 +0000 UTC m=+148.561514204" watchObservedRunningTime="2025-12-02 11:53:38.845394314 +0000 UTC m=+148.583944663"
Dec 02 11:53:38 crc kubenswrapper[4721]: W1202 11:53:38.874442 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-9e7ebcebd78f24be90fd696f292c3127457ed42b8dfcb2da6f6708feeb4bc11d WatchSource:0}: Error finding container 9e7ebcebd78f24be90fd696f292c3127457ed42b8dfcb2da6f6708feeb4bc11d: Status 404 returned error can't find the container with id 9e7ebcebd78f24be90fd696f292c3127457ed42b8dfcb2da6f6708feeb4bc11d
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.875534 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" event={"ID":"fae5f727-ca2f-43b8-9ab9-b6817c537c19","Type":"ContainerStarted","Data":"8377a0c2a990c2c12dfd958e155bb526c2ad8d3a7c7b5642ac9686e340cd451b"}
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.875577 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47"
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.889594 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-bl9q7" event={"ID":"5cd2ab71-ee95-4e70-a189-ef6152172de4","Type":"ContainerStarted","Data":"84363f93bda340a6c5d3aa098ba5012c88a252810666c8e2bf6d1c03d820e86b"}
Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.889632 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod"
pod="openshift-service-ca/service-ca-9c57cc56f-bl9q7" event={"ID":"5cd2ab71-ee95-4e70-a189-ef6152172de4","Type":"ContainerStarted","Data":"bd2a2d3133306eafa57b0b06bbeab04b5f46a12a1bed32a4861550a3ac3bb43c"} Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.911715 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" event={"ID":"b45dc373-92c5-48f1-9a77-309f0553079e","Type":"ContainerStarted","Data":"f07062249941b1f2c335f9ff62b72c24d9d7952ec967ac18859d4216e185bdc1"} Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.911750 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" event={"ID":"b45dc373-92c5-48f1-9a77-309f0553079e","Type":"ContainerStarted","Data":"4ad8c5952de1fd4ccc1377773e6328c19b7d893448156d35c7b7950c89e00c95"} Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.920487 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:38 crc kubenswrapper[4721]: E1202 11:53:38.921529 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:39.421508061 +0000 UTC m=+149.160058410 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.958899 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66mxw" event={"ID":"2b5cafc4-348a-4e37-b2a1-4c51e5111d43","Type":"ContainerStarted","Data":"2c243e5d0d312fabfd674ae3d9772fd28693dfd53409438df49e2b7a82a7fa73"} Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.958951 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66mxw" event={"ID":"2b5cafc4-348a-4e37-b2a1-4c51e5111d43","Type":"ContainerStarted","Data":"6491a158c5f36f038ed1635a070f768ed94c41d28e2a1d299b82e08e0113ed08"} Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.959036 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-ctcl6" podStartSLOduration=127.959025878 podStartE2EDuration="2m7.959025878s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:38.957934173 +0000 UTC m=+148.696484522" watchObservedRunningTime="2025-12-02 11:53:38.959025878 +0000 UTC m=+148.697576227" Dec 02 11:53:38 crc kubenswrapper[4721]: I1202 11:53:38.967154 4721 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" podStartSLOduration=127.967132574 podStartE2EDuration="2m7.967132574s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:38.912012761 +0000 UTC m=+148.650563110" watchObservedRunningTime="2025-12-02 11:53:38.967132574 +0000 UTC m=+148.705682923" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.011753 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 11:53:39 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Dec 02 11:53:39 crc kubenswrapper[4721]: [+]process-running ok Dec 02 11:53:39 crc kubenswrapper[4721]: healthz check failed Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.011810 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.018143 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" event={"ID":"a22a1d4a-00c0-4109-8eff-3492590aeb1a","Type":"ContainerStarted","Data":"afaf05a2c9faee97916bbe3dd0bdc5984a2d54d4e3dee9decfae862379743733"} Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.018189 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" event={"ID":"a22a1d4a-00c0-4109-8eff-3492590aeb1a","Type":"ContainerStarted","Data":"bd0ba4bcf928463aa9c654c89b7d336e9c6d92d4eb8a4a23fc305faf6e41b6c1"} Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.019050 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.022414 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:39 crc kubenswrapper[4721]: E1202 11:53:39.023392 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:39.523375713 +0000 UTC m=+149.261926062 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.044776 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" podStartSLOduration=128.044751029 podStartE2EDuration="2m8.044751029s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:39.037826181 +0000 UTC m=+148.776376550" watchObservedRunningTime="2025-12-02 11:53:39.044751029 +0000 UTC m=+148.783301408" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.045437 4721 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-857kt container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" start-of-body= Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.045500 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" podUID="a22a1d4a-00c0-4109-8eff-3492590aeb1a" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.059428 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf" event={"ID":"e528e899-db16-4e75-a55f-4eaa2462b90d","Type":"ContainerStarted","Data":"f9bc510c56d073b8353a394186a08cfe63dfa11ddfaae12dca232216513cdf06"} Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.059744 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf" event={"ID":"e528e899-db16-4e75-a55f-4eaa2462b90d","Type":"ContainerStarted","Data":"a698dc1257dacf53d798bae2b757edc8fbc7eb58b28a6a1878a54ae1e37c790f"} Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.093075 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-n2z7b" event={"ID":"bc7ef085-1e7b-4bcc-b00b-42deb769d466","Type":"ContainerStarted","Data":"14f253c74657662dd894c74f3186b02305149d1d07174bab7a86aaf3b62cb3c1"} Dec 02 11:53:39 crc kubenswrapper[4721]: W1202 11:53:39.108669 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-6b2d86c25d3f96df6a34c96aba39dfecc42b94802ae21b7e74c044a08ad2ba8b WatchSource:0}: Error finding container 6b2d86c25d3f96df6a34c96aba39dfecc42b94802ae21b7e74c044a08ad2ba8b: Status 404 returned error can't find the container with id 6b2d86c25d3f96df6a34c96aba39dfecc42b94802ae21b7e74c044a08ad2ba8b Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.109768 4721 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-bl9q7" podStartSLOduration=128.109748025 podStartE2EDuration="2m8.109748025s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:39.082566706 +0000 UTC m=+148.821117055" watchObservedRunningTime="2025-12-02 11:53:39.109748025 +0000 UTC m=+148.848298374" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.110983 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rb6cf" podStartSLOduration=128.110977554 podStartE2EDuration="2m8.110977554s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:39.108714043 +0000 UTC m=+148.847264392" watchObservedRunningTime="2025-12-02 11:53:39.110977554 +0000 UTC m=+148.849527903" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.125245 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:39 crc kubenswrapper[4721]: E1202 11:53:39.125714 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:39.625697109 +0000 UTC m=+149.364247458 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.139335 4721 generic.go:334] "Generic (PLEG): container finished" podID="70910e18-bc62-4cb7-bc07-0dc3a8a157ce" containerID="2006e89d99bd5b8689ffd2860cda9a26d20d7d1957b632c22398efcaa0e4e2a6" exitCode=0 Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.139410 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" event={"ID":"70910e18-bc62-4cb7-bc07-0dc3a8a157ce","Type":"ContainerDied","Data":"2006e89d99bd5b8689ffd2860cda9a26d20d7d1957b632c22398efcaa0e4e2a6"} Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.161176 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" podStartSLOduration=128.161156862 podStartE2EDuration="2m8.161156862s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:39.157741013 +0000 UTC m=+148.896291362" watchObservedRunningTime="2025-12-02 11:53:39.161156862 +0000 UTC m=+148.899707211" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.208014 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-66mxw" podStartSLOduration=128.207998593 podStartE2EDuration="2m8.207998593s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:39.194642961 +0000 UTC m=+148.933193310" watchObservedRunningTime="2025-12-02 11:53:39.207998593 +0000 UTC m=+148.946548942" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.209062 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" event={"ID":"9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9","Type":"ContainerStarted","Data":"3fcd56b8489b2d143abf74161e236322b7e48e4202d60721aba90ab8a1b7218d"} Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.209096 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" event={"ID":"9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9","Type":"ContainerStarted","Data":"6729f2ee7577376d72b5db98d4ac32240882e3191a44f1cb6787b2db78e2aebc"} Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.226126 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:39 crc kubenswrapper[4721]: E1202 11:53:39.226491 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:39.726463907 +0000 UTC m=+149.465014256 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.248747 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4" event={"ID":"30469e3e-7560-4574-8657-06fe673ed079","Type":"ContainerStarted","Data":"db8e928dfb2b9f4cfa3359d69fa2a422d1a277adf4db767bdcf53408967f9ac3"} Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.248813 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4" event={"ID":"30469e3e-7560-4574-8657-06fe673ed079","Type":"ContainerStarted","Data":"c7b72a051a43d7519aee95b8a9737b1d5938ae729f8f4322e07213e134a0b554"} Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.298699 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-n2z7b" podStartSLOduration=128.298673381 podStartE2EDuration="2m8.298673381s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:39.297717551 +0000 UTC m=+149.036267900" watchObservedRunningTime="2025-12-02 11:53:39.298673381 +0000 UTC m=+149.037223730" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.303659 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-n2cvg" event={"ID":"76d8c7b7-9f4b-4e54-9466-75bef179ce83","Type":"ContainerStarted","Data":"f8309fe85b17bbfa02c159dba268b00da8b1289b23cedef7a9ba6e8b694f33ef"} Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.309725 4721 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-gtb8k container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.309960 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" podUID="5754f3db-279a-4340-9354-8709b29e0398" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.324395 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8wrt4" podStartSLOduration=128.324381494 podStartE2EDuration="2m8.324381494s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:39.322491484 +0000 UTC m=+149.061041833" watchObservedRunningTime="2025-12-02 
11:53:39.324381494 +0000 UTC m=+149.062931843" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.327814 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.331733 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-rgmvn" Dec 02 11:53:39 crc kubenswrapper[4721]: E1202 11:53:39.332040 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:39.832027616 +0000 UTC m=+149.570577965 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.360006 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.360311 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.370467 4721 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-qpb9s container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.370514 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" podUID="0f706843-68b8-4598-9482-9c1361f60a63" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.387305 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" podStartSLOduration=128.387289724 podStartE2EDuration="2m8.387289724s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:39.356924214 +0000 UTC m=+149.095474583" watchObservedRunningTime="2025-12-02 11:53:39.387289724 +0000 UTC m=+149.125840073" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.433940 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:39 crc kubenswrapper[4721]: E1202 11:53:39.435556 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:39.93553734 +0000 UTC m=+149.674087679 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.466485 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bj7cn" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.536963 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:39 crc kubenswrapper[4721]: E1202 11:53:39.537273 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:40.037260887 +0000 UTC m=+149.775811236 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.621105 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.638880 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:39 crc kubenswrapper[4721]: E1202 11:53:39.639255 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:40.139234803 +0000 UTC m=+149.877785152 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.740965 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:39 crc kubenswrapper[4721]: E1202 11:53:39.741405 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:40.241388625 +0000 UTC m=+149.979938974 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.841899 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:39 crc kubenswrapper[4721]: E1202 11:53:39.842292 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:40.342276695 +0000 UTC m=+150.080827044 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.849440 4721 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-94q47 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.849510 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" podUID="fae5f727-ca2f-43b8-9ab9-b6817c537c19" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.24:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.943745 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:39 crc kubenswrapper[4721]: E1202 11:53:39.944127 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:40.444111296 +0000 UTC m=+150.182661645 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.986015 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 11:53:39 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Dec 02 11:53:39 crc kubenswrapper[4721]: [+]process-running ok Dec 02 11:53:39 crc kubenswrapper[4721]: healthz check failed Dec 02 11:53:39 crc kubenswrapper[4721]: I1202 11:53:39.986132 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.044869 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:40 crc kubenswrapper[4721]: E1202 11:53:40.045202 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:40.545186603 +0000 UTC m=+150.283736952 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.146378 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:40 crc kubenswrapper[4721]: E1202 11:53:40.146984 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:40.646962763 +0000 UTC m=+150.385513112 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.248092 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:40 crc kubenswrapper[4721]: E1202 11:53:40.248287 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:40.748255366 +0000 UTC m=+150.486805715 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.248440 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:40 crc kubenswrapper[4721]: E1202 11:53:40.248765 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:40.748750922 +0000 UTC m=+150.487301271 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.314092 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-clflg" event={"ID":"9499299a-d0a7-4f0b-a123-9a8d5e3fd0a9","Type":"ContainerStarted","Data":"56684856e4f4f3bccadf42820de19eda277364f8b13d5e8567915b362ed4d30a"} Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.317901 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-n2cvg" event={"ID":"76d8c7b7-9f4b-4e54-9466-75bef179ce83","Type":"ContainerStarted","Data":"6b3a825d259d54166bf130187ecd4163f9abee1d9b3684c5ca5b244d1804b2ac"} Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.317961 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-n2cvg" event={"ID":"76d8c7b7-9f4b-4e54-9466-75bef179ce83","Type":"ContainerStarted","Data":"fa3bd719acb22678c13ce4f309472249690d07da915b1b074ea5a94be71d4384"} Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.318050 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-n2cvg" Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.319995 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-n2z7b" event={"ID":"bc7ef085-1e7b-4bcc-b00b-42deb769d466","Type":"ContainerStarted","Data":"0c1a59baeadccd766a36d0cb31d839768a48b4f70f9bb598c287e28a358eaf67"} Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.324886 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" event={"ID":"70910e18-bc62-4cb7-bc07-0dc3a8a157ce","Type":"ContainerStarted","Data":"fcd24fc19f000425e645704f68f81db75d94cd01dae4ad3cc0e0202465f4e846"} Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.324944 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" event={"ID":"70910e18-bc62-4cb7-bc07-0dc3a8a157ce","Type":"ContainerStarted","Data":"d3c2e65aa789a721cb0a3f723a3eec604991b1d68249ea6c550301628acdd724"} Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.330000 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"e02621c39cb30c046cf241765354748715796a664519d4016d2c0ed8f8029f6c"} Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.330056 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"386949e14dcd0d098a40c142059dd072b1919fddef9606d116d08ba74f3c7405"} Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.331821 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" 
event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"1f50089f03e9f849b4df55e88de15855324fbd75008bd528d304c0bd55bfdbe5"} Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.331848 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"6b2d86c25d3f96df6a34c96aba39dfecc42b94802ae21b7e74c044a08ad2ba8b"} Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.333365 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"ca5ec9aa70fb8003790b303fa464d26482468aca501df44bf426a4e572c035ab"} Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.333445 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"9e7ebcebd78f24be90fd696f292c3127457ed42b8dfcb2da6f6708feeb4bc11d"} Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.333649 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.338404 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-n2cvg" podStartSLOduration=8.338388217 podStartE2EDuration="8.338388217s" podCreationTimestamp="2025-12-02 11:53:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:40.336754126 +0000 UTC m=+150.075304475" watchObservedRunningTime="2025-12-02 11:53:40.338388217 +0000 UTC m=+150.076938566" Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.338861 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" event={"ID":"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9","Type":"ContainerStarted","Data":"1a30673dfb1367adc1df3960a6058fd890208e346def51e21577fc0f7fa35a94"} Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.338920 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" event={"ID":"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9","Type":"ContainerStarted","Data":"f6f8aa02c08da55d4f6564aa6c7b534ce683aea8c39edb9d0c5a7218a2da427d"} Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.344166 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-857kt" Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.344218 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.349083 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:40 crc kubenswrapper[4721]: E1202 11:53:40.349485 4721 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:40.849467937 +0000 UTC m=+150.588018286 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.404302 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" podStartSLOduration=130.404282182 podStartE2EDuration="2m10.404282182s" podCreationTimestamp="2025-12-02 11:51:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:40.403728044 +0000 UTC m=+150.142278393" watchObservedRunningTime="2025-12-02 11:53:40.404282182 +0000 UTC m=+150.142832531" Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.450059 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:40 crc kubenswrapper[4721]: E1202 11:53:40.451231 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:40.951214616 +0000 UTC m=+150.689764965 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.552952 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:40 crc kubenswrapper[4721]: E1202 11:53:40.553463 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:41.053447549 +0000 UTC m=+150.791997898 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.640005 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-94q47" Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.655106 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:40 crc kubenswrapper[4721]: E1202 11:53:40.655451 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:41.155438085 +0000 UTC m=+150.893988434 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.657126 4721 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.755738 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:40 crc kubenswrapper[4721]: E1202 11:53:40.755828 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:41.25581054 +0000 UTC m=+150.994360889 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.755963 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:40 crc kubenswrapper[4721]: E1202 11:53:40.756318 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:41.256310226 +0000 UTC m=+150.994860575 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.856895 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:40 crc kubenswrapper[4721]: E1202 11:53:40.857188 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:41.357149706 +0000 UTC m=+151.095700055 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.857277 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:40 crc kubenswrapper[4721]: E1202 11:53:40.857621 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:41.35761222 +0000 UTC m=+151.096162569 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.957927 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:40 crc kubenswrapper[4721]: E1202 11:53:40.958103 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 11:53:41.458075288 +0000 UTC m=+151.196625637 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.958202 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:40 crc kubenswrapper[4721]: E1202 11:53:40.958583 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 11:53:41.458569664 +0000 UTC m=+151.197120013 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-tkr2s" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.985276 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 11:53:40 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Dec 02 11:53:40 crc kubenswrapper[4721]: [+]process-running ok Dec 02 11:53:40 crc kubenswrapper[4721]: healthz check failed Dec 02 11:53:40 crc kubenswrapper[4721]: I1202 11:53:40.985329 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.029747 4721 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-02T11:53:40.657149139Z","Handler":null,"Name":""} Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.034499 4721 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.034572 4721 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.059733 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.070389 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.161114 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.166971 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.167042 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.188267 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-tkr2s\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.346508 4721 generic.go:334] "Generic (PLEG): container finished" podID="e7980e22-75e8-4495-afb8-f486065789c4" containerID="0ea38412c49ec404101d03174feee3413ce3857d963fb948b8eb659016ba75c3" exitCode=0 Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.346638 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" event={"ID":"e7980e22-75e8-4495-afb8-f486065789c4","Type":"ContainerDied","Data":"0ea38412c49ec404101d03174feee3413ce3857d963fb948b8eb659016ba75c3"} Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.350984 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" event={"ID":"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9","Type":"ContainerStarted","Data":"7a2e1c28888f8e1e89299bbea522b71ecb435a0df38944edd1492d4b0493254f"} Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.351028 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" 
event={"ID":"dd5354f2-3190-4ef8-98b2-fc0f2ac61ed9","Type":"ContainerStarted","Data":"6fefa5de240001355b326c17913c50f572d41923c616abdf044ca419b71c4c0d"} Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.355277 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8fw86"] Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.356166 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8fw86" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.357998 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.369499 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8fw86"] Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.408271 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-pcp6j" podStartSLOduration=10.408253097 podStartE2EDuration="10.408253097s" podCreationTimestamp="2025-12-02 11:53:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:41.405228961 +0000 UTC m=+151.143779330" watchObservedRunningTime="2025-12-02 11:53:41.408253097 +0000 UTC m=+151.146803456" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.464131 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94f45fa8-3517-4376-b5c0-8d6cf0254027-utilities\") pod \"certified-operators-8fw86\" (UID: \"94f45fa8-3517-4376-b5c0-8d6cf0254027\") " pod="openshift-marketplace/certified-operators-8fw86" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.466114 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpkpf\" (UniqueName: \"kubernetes.io/projected/94f45fa8-3517-4376-b5c0-8d6cf0254027-kube-api-access-zpkpf\") pod \"certified-operators-8fw86\" (UID: \"94f45fa8-3517-4376-b5c0-8d6cf0254027\") " pod="openshift-marketplace/certified-operators-8fw86" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.467211 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94f45fa8-3517-4376-b5c0-8d6cf0254027-catalog-content\") pod \"certified-operators-8fw86\" (UID: \"94f45fa8-3517-4376-b5c0-8d6cf0254027\") " pod="openshift-marketplace/certified-operators-8fw86" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.488251 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.556858 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4k25r"] Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.557991 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4k25r" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.559953 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.568156 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94f45fa8-3517-4376-b5c0-8d6cf0254027-catalog-content\") pod \"certified-operators-8fw86\" (UID: \"94f45fa8-3517-4376-b5c0-8d6cf0254027\") " pod="openshift-marketplace/certified-operators-8fw86" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.568241 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94f45fa8-3517-4376-b5c0-8d6cf0254027-utilities\") pod \"certified-operators-8fw86\" (UID: \"94f45fa8-3517-4376-b5c0-8d6cf0254027\") " pod="openshift-marketplace/certified-operators-8fw86" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.568277 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpkpf\" (UniqueName: \"kubernetes.io/projected/94f45fa8-3517-4376-b5c0-8d6cf0254027-kube-api-access-zpkpf\") pod \"certified-operators-8fw86\" (UID: \"94f45fa8-3517-4376-b5c0-8d6cf0254027\") " pod="openshift-marketplace/certified-operators-8fw86" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.568618 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94f45fa8-3517-4376-b5c0-8d6cf0254027-catalog-content\") pod \"certified-operators-8fw86\" (UID: \"94f45fa8-3517-4376-b5c0-8d6cf0254027\") " pod="openshift-marketplace/certified-operators-8fw86" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.568708 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94f45fa8-3517-4376-b5c0-8d6cf0254027-utilities\") pod \"certified-operators-8fw86\" (UID: \"94f45fa8-3517-4376-b5c0-8d6cf0254027\") " pod="openshift-marketplace/certified-operators-8fw86" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.578195 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4k25r"] Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.601241 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpkpf\" (UniqueName: \"kubernetes.io/projected/94f45fa8-3517-4376-b5c0-8d6cf0254027-kube-api-access-zpkpf\") pod \"certified-operators-8fw86\" (UID: \"94f45fa8-3517-4376-b5c0-8d6cf0254027\") " pod="openshift-marketplace/certified-operators-8fw86" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.669769 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8fw86" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.670133 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpxgx\" (UniqueName: \"kubernetes.io/projected/f1e35ec2-7693-4103-97e1-3304b8ac93f4-kube-api-access-bpxgx\") pod \"community-operators-4k25r\" (UID: \"f1e35ec2-7693-4103-97e1-3304b8ac93f4\") " pod="openshift-marketplace/community-operators-4k25r" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.670799 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1e35ec2-7693-4103-97e1-3304b8ac93f4-catalog-content\") pod \"community-operators-4k25r\" (UID: \"f1e35ec2-7693-4103-97e1-3304b8ac93f4\") " pod="openshift-marketplace/community-operators-4k25r" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.670907 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1e35ec2-7693-4103-97e1-3304b8ac93f4-utilities\") pod \"community-operators-4k25r\" (UID: \"f1e35ec2-7693-4103-97e1-3304b8ac93f4\") " pod="openshift-marketplace/community-operators-4k25r" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.747971 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tkr2s"] Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.752504 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-69prw"] Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.753458 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-69prw" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.761510 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-69prw"] Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.784671 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1e35ec2-7693-4103-97e1-3304b8ac93f4-catalog-content\") pod \"community-operators-4k25r\" (UID: \"f1e35ec2-7693-4103-97e1-3304b8ac93f4\") " pod="openshift-marketplace/community-operators-4k25r" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.784742 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1e35ec2-7693-4103-97e1-3304b8ac93f4-utilities\") pod \"community-operators-4k25r\" (UID: \"f1e35ec2-7693-4103-97e1-3304b8ac93f4\") " pod="openshift-marketplace/community-operators-4k25r" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.784820 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpxgx\" (UniqueName: \"kubernetes.io/projected/f1e35ec2-7693-4103-97e1-3304b8ac93f4-kube-api-access-bpxgx\") pod \"community-operators-4k25r\" (UID: \"f1e35ec2-7693-4103-97e1-3304b8ac93f4\") " pod="openshift-marketplace/community-operators-4k25r" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.785731 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1e35ec2-7693-4103-97e1-3304b8ac93f4-catalog-content\") pod \"community-operators-4k25r\" (UID: \"f1e35ec2-7693-4103-97e1-3304b8ac93f4\") " pod="openshift-marketplace/community-operators-4k25r" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.786052 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1e35ec2-7693-4103-97e1-3304b8ac93f4-utilities\") pod \"community-operators-4k25r\" (UID: \"f1e35ec2-7693-4103-97e1-3304b8ac93f4\") " pod="openshift-marketplace/community-operators-4k25r" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.813027 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpxgx\" (UniqueName: \"kubernetes.io/projected/f1e35ec2-7693-4103-97e1-3304b8ac93f4-kube-api-access-bpxgx\") pod \"community-operators-4k25r\" (UID: \"f1e35ec2-7693-4103-97e1-3304b8ac93f4\") " pod="openshift-marketplace/community-operators-4k25r" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.874087 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4k25r" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.885780 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nc77\" (UniqueName: \"kubernetes.io/projected/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-kube-api-access-6nc77\") pod \"certified-operators-69prw\" (UID: \"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d\") " pod="openshift-marketplace/certified-operators-69prw" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.885884 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-catalog-content\") pod \"certified-operators-69prw\" (UID: \"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d\") " pod="openshift-marketplace/certified-operators-69prw" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.885918 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-utilities\") pod \"certified-operators-69prw\" (UID: \"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d\") " pod="openshift-marketplace/certified-operators-69prw" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.914099 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8fw86"] Dec 02 11:53:41 crc kubenswrapper[4721]: W1202 11:53:41.930274 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94f45fa8_3517_4376_b5c0_8d6cf0254027.slice/crio-9527b67e8ce16ec0f2374fd19b42412a7f71c8cac49e9cd53a70f092b4f68df9 WatchSource:0}: Error finding container 9527b67e8ce16ec0f2374fd19b42412a7f71c8cac49e9cd53a70f092b4f68df9: Status 404 returned error can't find the container with id 9527b67e8ce16ec0f2374fd19b42412a7f71c8cac49e9cd53a70f092b4f68df9 Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.945894 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wpd4r"] Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.947186 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wpd4r" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.964423 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wpd4r"] Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.985679 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 11:53:41 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Dec 02 11:53:41 crc kubenswrapper[4721]: [+]process-running ok Dec 02 11:53:41 crc kubenswrapper[4721]: healthz check failed Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.985738 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.987385 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nc77\" (UniqueName: \"kubernetes.io/projected/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-kube-api-access-6nc77\") pod \"certified-operators-69prw\" (UID: \"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d\") " pod="openshift-marketplace/certified-operators-69prw" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.987475 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-catalog-content\") pod \"certified-operators-69prw\" (UID: \"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d\") " pod="openshift-marketplace/certified-operators-69prw" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.987515 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-utilities\") pod \"certified-operators-69prw\" (UID: \"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d\") " pod="openshift-marketplace/certified-operators-69prw" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.988160 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-utilities\") pod \"certified-operators-69prw\" (UID: \"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d\") " pod="openshift-marketplace/certified-operators-69prw" Dec 02 11:53:41 crc kubenswrapper[4721]: I1202 11:53:41.988584 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-catalog-content\") pod \"certified-operators-69prw\" (UID: \"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d\") " pod="openshift-marketplace/certified-operators-69prw" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.008058 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nc77\" (UniqueName: \"kubernetes.io/projected/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-kube-api-access-6nc77\") pod \"certified-operators-69prw\" (UID: \"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d\") " pod="openshift-marketplace/certified-operators-69prw" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.089229 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hrbj\" (UniqueName: \"kubernetes.io/projected/9610e93f-3f1c-4f61-a171-2e9739620178-kube-api-access-5hrbj\") pod \"community-operators-wpd4r\" (UID: \"9610e93f-3f1c-4f61-a171-2e9739620178\") " pod="openshift-marketplace/community-operators-wpd4r" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.089319 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9610e93f-3f1c-4f61-a171-2e9739620178-catalog-content\") pod \"community-operators-wpd4r\" (UID: \"9610e93f-3f1c-4f61-a171-2e9739620178\") " pod="openshift-marketplace/community-operators-wpd4r" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.089363 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9610e93f-3f1c-4f61-a171-2e9739620178-utilities\") pod \"community-operators-wpd4r\" (UID: \"9610e93f-3f1c-4f61-a171-2e9739620178\") " pod="openshift-marketplace/community-operators-wpd4r" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.097079 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-69prw" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.122509 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4k25r"] Dec 02 11:53:42 crc kubenswrapper[4721]: W1202 11:53:42.128280 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1e35ec2_7693_4103_97e1_3304b8ac93f4.slice/crio-e0b37a46efff5578c5ff68c94440f0fca3df8578b4b16dd377101bdf5664c3a1 WatchSource:0}: Error finding container e0b37a46efff5578c5ff68c94440f0fca3df8578b4b16dd377101bdf5664c3a1: Status 404 returned error can't find the container with id e0b37a46efff5578c5ff68c94440f0fca3df8578b4b16dd377101bdf5664c3a1 Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.190193 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hrbj\" (UniqueName: \"kubernetes.io/projected/9610e93f-3f1c-4f61-a171-2e9739620178-kube-api-access-5hrbj\") pod \"community-operators-wpd4r\" (UID: \"9610e93f-3f1c-4f61-a171-2e9739620178\") " pod="openshift-marketplace/community-operators-wpd4r" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.190566 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9610e93f-3f1c-4f61-a171-2e9739620178-catalog-content\") pod \"community-operators-wpd4r\" (UID: \"9610e93f-3f1c-4f61-a171-2e9739620178\") " pod="openshift-marketplace/community-operators-wpd4r" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.190590 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9610e93f-3f1c-4f61-a171-2e9739620178-utilities\") pod \"community-operators-wpd4r\" (UID: \"9610e93f-3f1c-4f61-a171-2e9739620178\") " pod="openshift-marketplace/community-operators-wpd4r" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.191074 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9610e93f-3f1c-4f61-a171-2e9739620178-utilities\") pod \"community-operators-wpd4r\" (UID: 
\"9610e93f-3f1c-4f61-a171-2e9739620178\") " pod="openshift-marketplace/community-operators-wpd4r" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.191651 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9610e93f-3f1c-4f61-a171-2e9739620178-catalog-content\") pod \"community-operators-wpd4r\" (UID: \"9610e93f-3f1c-4f61-a171-2e9739620178\") " pod="openshift-marketplace/community-operators-wpd4r" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.207115 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hrbj\" (UniqueName: \"kubernetes.io/projected/9610e93f-3f1c-4f61-a171-2e9739620178-kube-api-access-5hrbj\") pod \"community-operators-wpd4r\" (UID: \"9610e93f-3f1c-4f61-a171-2e9739620178\") " pod="openshift-marketplace/community-operators-wpd4r" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.269611 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wpd4r" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.332526 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-69prw"] Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.362782 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" event={"ID":"ed8a3365-c6aa-472a-bb41-10905fcabbf6","Type":"ContainerStarted","Data":"c0c3683e38762dd5217e7f03057453a9ae51f782d3c88073a73f8a70cbc08b41"} Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.362819 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" event={"ID":"ed8a3365-c6aa-472a-bb41-10905fcabbf6","Type":"ContainerStarted","Data":"ee0334c1c87e68032d31064a6bdd2bfc571483eaf0e1f7527f989688f8a4be8f"} Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.363757 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.370504 4721 generic.go:334] "Generic (PLEG): container finished" podID="94f45fa8-3517-4376-b5c0-8d6cf0254027" containerID="828be2538b4c67ba1f07a333f9c900413a2f3b06829984905a48580bee6b2531" exitCode=0 Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.370552 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8fw86" event={"ID":"94f45fa8-3517-4376-b5c0-8d6cf0254027","Type":"ContainerDied","Data":"828be2538b4c67ba1f07a333f9c900413a2f3b06829984905a48580bee6b2531"} Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.370576 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8fw86" event={"ID":"94f45fa8-3517-4376-b5c0-8d6cf0254027","Type":"ContainerStarted","Data":"9527b67e8ce16ec0f2374fd19b42412a7f71c8cac49e9cd53a70f092b4f68df9"} Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.375298 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.377735 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4k25r" event={"ID":"f1e35ec2-7693-4103-97e1-3304b8ac93f4","Type":"ContainerStarted","Data":"085f3f87c3026e5868a3a02449b3a712fb715852b72b83c7ab4e8b67b60168ca"} Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 
11:53:42.377759 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4k25r" event={"ID":"f1e35ec2-7693-4103-97e1-3304b8ac93f4","Type":"ContainerStarted","Data":"e0b37a46efff5578c5ff68c94440f0fca3df8578b4b16dd377101bdf5664c3a1"} Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.387791 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" podStartSLOduration=131.387773719 podStartE2EDuration="2m11.387773719s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:42.385329662 +0000 UTC m=+152.123880011" watchObservedRunningTime="2025-12-02 11:53:42.387773719 +0000 UTC m=+152.126324068" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.555801 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wpd4r"] Dec 02 11:53:42 crc kubenswrapper[4721]: W1202 11:53:42.574023 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9610e93f_3f1c_4f61_a171_2e9739620178.slice/crio-e61b7a3190369cf575115bf01f498a1eef6b84f39cab33098ae75483d70dee56 WatchSource:0}: Error finding container e61b7a3190369cf575115bf01f498a1eef6b84f39cab33098ae75483d70dee56: Status 404 returned error can't find the container with id e61b7a3190369cf575115bf01f498a1eef6b84f39cab33098ae75483d70dee56 Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.762871 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.846246 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.907465 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7980e22-75e8-4495-afb8-f486065789c4-config-volume\") pod \"e7980e22-75e8-4495-afb8-f486065789c4\" (UID: \"e7980e22-75e8-4495-afb8-f486065789c4\") " Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.907532 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7980e22-75e8-4495-afb8-f486065789c4-secret-volume\") pod \"e7980e22-75e8-4495-afb8-f486065789c4\" (UID: \"e7980e22-75e8-4495-afb8-f486065789c4\") " Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.907598 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7fnp\" (UniqueName: \"kubernetes.io/projected/e7980e22-75e8-4495-afb8-f486065789c4-kube-api-access-t7fnp\") pod \"e7980e22-75e8-4495-afb8-f486065789c4\" (UID: \"e7980e22-75e8-4495-afb8-f486065789c4\") " Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.908044 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7980e22-75e8-4495-afb8-f486065789c4-config-volume" (OuterVolumeSpecName: "config-volume") pod "e7980e22-75e8-4495-afb8-f486065789c4" (UID: "e7980e22-75e8-4495-afb8-f486065789c4"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.915927 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7980e22-75e8-4495-afb8-f486065789c4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e7980e22-75e8-4495-afb8-f486065789c4" (UID: "e7980e22-75e8-4495-afb8-f486065789c4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.916668 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7980e22-75e8-4495-afb8-f486065789c4-kube-api-access-t7fnp" (OuterVolumeSpecName: "kube-api-access-t7fnp") pod "e7980e22-75e8-4495-afb8-f486065789c4" (UID: "e7980e22-75e8-4495-afb8-f486065789c4"). InnerVolumeSpecName "kube-api-access-t7fnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.986239 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 11:53:42 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Dec 02 11:53:42 crc kubenswrapper[4721]: [+]process-running ok Dec 02 11:53:42 crc kubenswrapper[4721]: healthz check failed Dec 02 11:53:42 crc kubenswrapper[4721]: I1202 11:53:42.986301 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.009658 4721 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e7980e22-75e8-4495-afb8-f486065789c4-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.009698 4721 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e7980e22-75e8-4495-afb8-f486065789c4-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.009723 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7fnp\" (UniqueName: \"kubernetes.io/projected/e7980e22-75e8-4495-afb8-f486065789c4-kube-api-access-t7fnp\") on node \"crc\" DevicePath \"\"" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.346582 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4qzrw"] Dec 02 11:53:43 crc kubenswrapper[4721]: E1202 11:53:43.346838 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7980e22-75e8-4495-afb8-f486065789c4" containerName="collect-profiles" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.346852 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7980e22-75e8-4495-afb8-f486065789c4" containerName="collect-profiles" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.346976 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7980e22-75e8-4495-afb8-f486065789c4" containerName="collect-profiles" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.347844 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4qzrw" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.349321 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.361682 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4qzrw"] Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.385639 4721 generic.go:334] "Generic (PLEG): container finished" podID="f1e35ec2-7693-4103-97e1-3304b8ac93f4" containerID="085f3f87c3026e5868a3a02449b3a712fb715852b72b83c7ab4e8b67b60168ca" exitCode=0 Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.385704 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4k25r" event={"ID":"f1e35ec2-7693-4103-97e1-3304b8ac93f4","Type":"ContainerDied","Data":"085f3f87c3026e5868a3a02449b3a712fb715852b72b83c7ab4e8b67b60168ca"} Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.387097 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" event={"ID":"e7980e22-75e8-4495-afb8-f486065789c4","Type":"ContainerDied","Data":"7343c0e911321540c672be7c133b60e3d14ae0fb6e7f9fba093ec3b73ae57e78"} Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.387130 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7343c0e911321540c672be7c133b60e3d14ae0fb6e7f9fba093ec3b73ae57e78" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.387153 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411265-f5k6f" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.396401 4721 generic.go:334] "Generic (PLEG): container finished" podID="e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" containerID="5f103363afda81262bed0ead42b675e283ce72dcc3ee538618aa32abbb889584" exitCode=0 Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.396670 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-69prw" event={"ID":"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d","Type":"ContainerDied","Data":"5f103363afda81262bed0ead42b675e283ce72dcc3ee538618aa32abbb889584"} Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.397097 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-69prw" event={"ID":"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d","Type":"ContainerStarted","Data":"f27d05f5596d53b546419e7399b6ab0e35f07c539623c16bc7ad04430189a8aa"} Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.418946 4721 generic.go:334] "Generic (PLEG): container finished" podID="9610e93f-3f1c-4f61-a171-2e9739620178" containerID="103e4b9f210b16b87fcb300e4e6e18e880bb41f0b454db426330f29a80b2a148" exitCode=0 Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.419792 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wpd4r" event={"ID":"9610e93f-3f1c-4f61-a171-2e9739620178","Type":"ContainerDied","Data":"103e4b9f210b16b87fcb300e4e6e18e880bb41f0b454db426330f29a80b2a148"} Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.420025 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wpd4r" 
event={"ID":"9610e93f-3f1c-4f61-a171-2e9739620178","Type":"ContainerStarted","Data":"e61b7a3190369cf575115bf01f498a1eef6b84f39cab33098ae75483d70dee56"} Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.514889 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-catalog-content\") pod \"redhat-marketplace-4qzrw\" (UID: \"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44\") " pod="openshift-marketplace/redhat-marketplace-4qzrw" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.514964 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-utilities\") pod \"redhat-marketplace-4qzrw\" (UID: \"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44\") " pod="openshift-marketplace/redhat-marketplace-4qzrw" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.515090 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bglfp\" (UniqueName: \"kubernetes.io/projected/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-kube-api-access-bglfp\") pod \"redhat-marketplace-4qzrw\" (UID: \"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44\") " pod="openshift-marketplace/redhat-marketplace-4qzrw" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.617648 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-utilities\") pod \"redhat-marketplace-4qzrw\" (UID: \"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44\") " pod="openshift-marketplace/redhat-marketplace-4qzrw" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.617844 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bglfp\" (UniqueName: \"kubernetes.io/projected/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-kube-api-access-bglfp\") pod \"redhat-marketplace-4qzrw\" (UID: \"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44\") " pod="openshift-marketplace/redhat-marketplace-4qzrw" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.617900 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-catalog-content\") pod \"redhat-marketplace-4qzrw\" (UID: \"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44\") " pod="openshift-marketplace/redhat-marketplace-4qzrw" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.618622 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-catalog-content\") pod \"redhat-marketplace-4qzrw\" (UID: \"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44\") " pod="openshift-marketplace/redhat-marketplace-4qzrw" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.618864 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-utilities\") pod \"redhat-marketplace-4qzrw\" (UID: \"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44\") " pod="openshift-marketplace/redhat-marketplace-4qzrw" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.637025 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bglfp\" (UniqueName: 
\"kubernetes.io/projected/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-kube-api-access-bglfp\") pod \"redhat-marketplace-4qzrw\" (UID: \"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44\") " pod="openshift-marketplace/redhat-marketplace-4qzrw" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.674267 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4qzrw" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.748439 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7l4c7"] Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.749712 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7l4c7" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.763010 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7l4c7"] Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.919100 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4qzrw"] Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.921894 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d84nk\" (UniqueName: \"kubernetes.io/projected/4dddd92c-299e-49fa-8c1b-e68c31f2a743-kube-api-access-d84nk\") pod \"redhat-marketplace-7l4c7\" (UID: \"4dddd92c-299e-49fa-8c1b-e68c31f2a743\") " pod="openshift-marketplace/redhat-marketplace-7l4c7" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.921942 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dddd92c-299e-49fa-8c1b-e68c31f2a743-catalog-content\") pod \"redhat-marketplace-7l4c7\" (UID: \"4dddd92c-299e-49fa-8c1b-e68c31f2a743\") " pod="openshift-marketplace/redhat-marketplace-7l4c7" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.921974 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dddd92c-299e-49fa-8c1b-e68c31f2a743-utilities\") pod \"redhat-marketplace-7l4c7\" (UID: \"4dddd92c-299e-49fa-8c1b-e68c31f2a743\") " pod="openshift-marketplace/redhat-marketplace-7l4c7" Dec 02 11:53:43 crc kubenswrapper[4721]: W1202 11:53:43.962650 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5e1d4456_c1ef_4ab1_9b58_5ae5b7f8ef44.slice/crio-a810b1da32c93e24800404009f123c028496c160379f847aa2b1afacfebc5ea8 WatchSource:0}: Error finding container a810b1da32c93e24800404009f123c028496c160379f847aa2b1afacfebc5ea8: Status 404 returned error can't find the container with id a810b1da32c93e24800404009f123c028496c160379f847aa2b1afacfebc5ea8 Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.982778 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-wtlwb" Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.986007 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 11:53:43 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Dec 02 11:53:43 crc kubenswrapper[4721]: [+]process-running ok Dec 02 
11:53:43 crc kubenswrapper[4721]: healthz check failed Dec 02 11:53:43 crc kubenswrapper[4721]: I1202 11:53:43.986064 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.023169 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dddd92c-299e-49fa-8c1b-e68c31f2a743-catalog-content\") pod \"redhat-marketplace-7l4c7\" (UID: \"4dddd92c-299e-49fa-8c1b-e68c31f2a743\") " pod="openshift-marketplace/redhat-marketplace-7l4c7" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.023255 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dddd92c-299e-49fa-8c1b-e68c31f2a743-utilities\") pod \"redhat-marketplace-7l4c7\" (UID: \"4dddd92c-299e-49fa-8c1b-e68c31f2a743\") " pod="openshift-marketplace/redhat-marketplace-7l4c7" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.023403 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d84nk\" (UniqueName: \"kubernetes.io/projected/4dddd92c-299e-49fa-8c1b-e68c31f2a743-kube-api-access-d84nk\") pod \"redhat-marketplace-7l4c7\" (UID: \"4dddd92c-299e-49fa-8c1b-e68c31f2a743\") " pod="openshift-marketplace/redhat-marketplace-7l4c7" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.024962 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dddd92c-299e-49fa-8c1b-e68c31f2a743-catalog-content\") pod \"redhat-marketplace-7l4c7\" (UID: \"4dddd92c-299e-49fa-8c1b-e68c31f2a743\") " pod="openshift-marketplace/redhat-marketplace-7l4c7" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.025790 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dddd92c-299e-49fa-8c1b-e68c31f2a743-utilities\") pod \"redhat-marketplace-7l4c7\" (UID: \"4dddd92c-299e-49fa-8c1b-e68c31f2a743\") " pod="openshift-marketplace/redhat-marketplace-7l4c7" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.046476 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.047425 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.049676 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.050456 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.052371 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d84nk\" (UniqueName: \"kubernetes.io/projected/4dddd92c-299e-49fa-8c1b-e68c31f2a743-kube-api-access-d84nk\") pod \"redhat-marketplace-7l4c7\" (UID: \"4dddd92c-299e-49fa-8c1b-e68c31f2a743\") " pod="openshift-marketplace/redhat-marketplace-7l4c7" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.055314 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.068638 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7l4c7" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.171097 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.171139 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-tl5t9" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.174110 4721 patch_prober.go:28] interesting pod/console-f9d7485db-tl5t9 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.174165 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-tl5t9" podUID="54beb89c-d2c3-42c8-a186-465ebcbb4e4b" containerName="console" probeResult="failure" output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.226437 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f43f9793-56f4-4898-a0ae-bfcc5d3218c2-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f43f9793-56f4-4898-a0ae-bfcc5d3218c2\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.226486 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f43f9793-56f4-4898-a0ae-bfcc5d3218c2-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f43f9793-56f4-4898-a0ae-bfcc5d3218c2\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.272973 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7l4c7"] Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.277950 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-qx7nr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get 
\"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.278005 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qx7nr" podUID="78c789c6-4ac0-4a7b-8cb6-b072909ccf03" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.278017 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-qx7nr container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.278077 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-qx7nr" podUID="78c789c6-4ac0-4a7b-8cb6-b072909ccf03" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.327245 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f43f9793-56f4-4898-a0ae-bfcc5d3218c2-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f43f9793-56f4-4898-a0ae-bfcc5d3218c2\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.327326 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f43f9793-56f4-4898-a0ae-bfcc5d3218c2-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f43f9793-56f4-4898-a0ae-bfcc5d3218c2\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.328322 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f43f9793-56f4-4898-a0ae-bfcc5d3218c2-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f43f9793-56f4-4898-a0ae-bfcc5d3218c2\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.360505 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f43f9793-56f4-4898-a0ae-bfcc5d3218c2-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f43f9793-56f4-4898-a0ae-bfcc5d3218c2\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.362609 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.368962 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-qpb9s" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.383211 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.483858 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7l4c7" event={"ID":"4dddd92c-299e-49fa-8c1b-e68c31f2a743","Type":"ContainerStarted","Data":"a5b0106e884aa9c1e3dc49ed453e0077441941a3bead9fcaaa050b0c378fca07"} Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.495932 4721 generic.go:334] "Generic (PLEG): container finished" podID="5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44" containerID="cee41dccd15454a4afaeca3f0862d4d9f04fb390e662e1e73e1cce7a460df046" exitCode=0 Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.496099 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qzrw" event={"ID":"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44","Type":"ContainerDied","Data":"cee41dccd15454a4afaeca3f0862d4d9f04fb390e662e1e73e1cce7a460df046"} Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.496148 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qzrw" event={"ID":"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44","Type":"ContainerStarted","Data":"a810b1da32c93e24800404009f123c028496c160379f847aa2b1afacfebc5ea8"} Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.548838 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-24l7f"] Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.550010 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-24l7f" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.552763 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.563379 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-24l7f"] Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.636606 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a704275-5d23-4f38-805d-e0f8be9b50dc-utilities\") pod \"redhat-operators-24l7f\" (UID: \"9a704275-5d23-4f38-805d-e0f8be9b50dc\") " pod="openshift-marketplace/redhat-operators-24l7f" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.637057 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhs9j\" (UniqueName: \"kubernetes.io/projected/9a704275-5d23-4f38-805d-e0f8be9b50dc-kube-api-access-lhs9j\") pod \"redhat-operators-24l7f\" (UID: \"9a704275-5d23-4f38-805d-e0f8be9b50dc\") " pod="openshift-marketplace/redhat-operators-24l7f" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.637115 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a704275-5d23-4f38-805d-e0f8be9b50dc-catalog-content\") pod \"redhat-operators-24l7f\" (UID: \"9a704275-5d23-4f38-805d-e0f8be9b50dc\") " pod="openshift-marketplace/redhat-operators-24l7f" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.664831 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.664888 4721 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.677485 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.739087 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a704275-5d23-4f38-805d-e0f8be9b50dc-utilities\") pod \"redhat-operators-24l7f\" (UID: \"9a704275-5d23-4f38-805d-e0f8be9b50dc\") " pod="openshift-marketplace/redhat-operators-24l7f" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.739165 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhs9j\" (UniqueName: \"kubernetes.io/projected/9a704275-5d23-4f38-805d-e0f8be9b50dc-kube-api-access-lhs9j\") pod \"redhat-operators-24l7f\" (UID: \"9a704275-5d23-4f38-805d-e0f8be9b50dc\") " pod="openshift-marketplace/redhat-operators-24l7f" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.739198 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a704275-5d23-4f38-805d-e0f8be9b50dc-catalog-content\") pod \"redhat-operators-24l7f\" (UID: \"9a704275-5d23-4f38-805d-e0f8be9b50dc\") " pod="openshift-marketplace/redhat-operators-24l7f" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.739919 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a704275-5d23-4f38-805d-e0f8be9b50dc-catalog-content\") pod \"redhat-operators-24l7f\" (UID: \"9a704275-5d23-4f38-805d-e0f8be9b50dc\") " pod="openshift-marketplace/redhat-operators-24l7f" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.740897 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a704275-5d23-4f38-805d-e0f8be9b50dc-utilities\") pod \"redhat-operators-24l7f\" (UID: \"9a704275-5d23-4f38-805d-e0f8be9b50dc\") " pod="openshift-marketplace/redhat-operators-24l7f" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.741307 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 02 11:53:44 crc kubenswrapper[4721]: W1202 11:53:44.753575 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf43f9793_56f4_4898_a0ae_bfcc5d3218c2.slice/crio-331e21b51c74cdcb75ca0899583fbc41cb81e72cb5b78b966e6ec0a6255600b3 WatchSource:0}: Error finding container 331e21b51c74cdcb75ca0899583fbc41cb81e72cb5b78b966e6ec0a6255600b3: Status 404 returned error can't find the container with id 331e21b51c74cdcb75ca0899583fbc41cb81e72cb5b78b966e6ec0a6255600b3 Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.765315 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhs9j\" (UniqueName: \"kubernetes.io/projected/9a704275-5d23-4f38-805d-e0f8be9b50dc-kube-api-access-lhs9j\") pod \"redhat-operators-24l7f\" (UID: \"9a704275-5d23-4f38-805d-e0f8be9b50dc\") " pod="openshift-marketplace/redhat-operators-24l7f" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.882170 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-24l7f" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.956254 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zxl2w"] Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.957621 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.963526 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zxl2w"] Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.985046 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 11:53:44 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Dec 02 11:53:44 crc kubenswrapper[4721]: [+]process-running ok Dec 02 11:53:44 crc kubenswrapper[4721]: healthz check failed Dec 02 11:53:44 crc kubenswrapper[4721]: I1202 11:53:44.985091 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.043102 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91923222-03a2-43ce-8382-75953af7dcfc-catalog-content\") pod \"redhat-operators-zxl2w\" (UID: \"91923222-03a2-43ce-8382-75953af7dcfc\") " pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.043177 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgpw8\" (UniqueName: \"kubernetes.io/projected/91923222-03a2-43ce-8382-75953af7dcfc-kube-api-access-cgpw8\") pod \"redhat-operators-zxl2w\" (UID: \"91923222-03a2-43ce-8382-75953af7dcfc\") " pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.043197 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91923222-03a2-43ce-8382-75953af7dcfc-utilities\") pod \"redhat-operators-zxl2w\" (UID: \"91923222-03a2-43ce-8382-75953af7dcfc\") " pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.145078 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91923222-03a2-43ce-8382-75953af7dcfc-catalog-content\") pod \"redhat-operators-zxl2w\" (UID: \"91923222-03a2-43ce-8382-75953af7dcfc\") " pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.145441 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgpw8\" (UniqueName: \"kubernetes.io/projected/91923222-03a2-43ce-8382-75953af7dcfc-kube-api-access-cgpw8\") pod \"redhat-operators-zxl2w\" (UID: \"91923222-03a2-43ce-8382-75953af7dcfc\") " pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.145465 4721 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91923222-03a2-43ce-8382-75953af7dcfc-utilities\") pod \"redhat-operators-zxl2w\" (UID: \"91923222-03a2-43ce-8382-75953af7dcfc\") " pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.146078 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91923222-03a2-43ce-8382-75953af7dcfc-catalog-content\") pod \"redhat-operators-zxl2w\" (UID: \"91923222-03a2-43ce-8382-75953af7dcfc\") " pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.146092 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91923222-03a2-43ce-8382-75953af7dcfc-utilities\") pod \"redhat-operators-zxl2w\" (UID: \"91923222-03a2-43ce-8382-75953af7dcfc\") " pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.166225 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgpw8\" (UniqueName: \"kubernetes.io/projected/91923222-03a2-43ce-8382-75953af7dcfc-kube-api-access-cgpw8\") pod \"redhat-operators-zxl2w\" (UID: \"91923222-03a2-43ce-8382-75953af7dcfc\") " pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.194772 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-24l7f"] Dec 02 11:53:45 crc kubenswrapper[4721]: W1202 11:53:45.236678 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a704275_5d23_4f38_805d_e0f8be9b50dc.slice/crio-ecf656ad151efaf6ea41d416053f0f00783a442c1113e1e0a81317d7c4bfdc75 WatchSource:0}: Error finding container ecf656ad151efaf6ea41d416053f0f00783a442c1113e1e0a81317d7c4bfdc75: Status 404 returned error can't find the container with id ecf656ad151efaf6ea41d416053f0f00783a442c1113e1e0a81317d7c4bfdc75 Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.279699 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.504777 4721 generic.go:334] "Generic (PLEG): container finished" podID="9a704275-5d23-4f38-805d-e0f8be9b50dc" containerID="bab484bca3acb35d267b305bbbfa61f099f0a3384d39e8c587f77c621d3e7f43" exitCode=0 Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.505186 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-24l7f" event={"ID":"9a704275-5d23-4f38-805d-e0f8be9b50dc","Type":"ContainerDied","Data":"bab484bca3acb35d267b305bbbfa61f099f0a3384d39e8c587f77c621d3e7f43"} Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.505220 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-24l7f" event={"ID":"9a704275-5d23-4f38-805d-e0f8be9b50dc","Type":"ContainerStarted","Data":"ecf656ad151efaf6ea41d416053f0f00783a442c1113e1e0a81317d7c4bfdc75"} Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.509098 4721 generic.go:334] "Generic (PLEG): container finished" podID="4dddd92c-299e-49fa-8c1b-e68c31f2a743" containerID="e4b93877861582bc1afbe0e0b3f58ceaebc15fe1dd953386675d1d5ad1727dfb" exitCode=0 Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.509203 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7l4c7" event={"ID":"4dddd92c-299e-49fa-8c1b-e68c31f2a743","Type":"ContainerDied","Data":"e4b93877861582bc1afbe0e0b3f58ceaebc15fe1dd953386675d1d5ad1727dfb"} Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.513221 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f43f9793-56f4-4898-a0ae-bfcc5d3218c2","Type":"ContainerStarted","Data":"354da87563e702d13b4ff62ba48386757cfcae6ad74e36b72d0294a505b3b68c"} Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.513249 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f43f9793-56f4-4898-a0ae-bfcc5d3218c2","Type":"ContainerStarted","Data":"331e21b51c74cdcb75ca0899583fbc41cb81e72cb5b78b966e6ec0a6255600b3"} Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.519169 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-q8vvn" Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.602850 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zxl2w"] Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.679556 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.679538068 podStartE2EDuration="1.679538068s" podCreationTimestamp="2025-12-02 11:53:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:45.678744983 +0000 UTC m=+155.417295332" watchObservedRunningTime="2025-12-02 11:53:45.679538068 +0000 UTC m=+155.418088417" Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.985910 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 11:53:45 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Dec 02 
11:53:45 crc kubenswrapper[4721]: [+]process-running ok Dec 02 11:53:45 crc kubenswrapper[4721]: healthz check failed Dec 02 11:53:45 crc kubenswrapper[4721]: I1202 11:53:45.985993 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 11:53:46 crc kubenswrapper[4721]: I1202 11:53:46.531382 4721 generic.go:334] "Generic (PLEG): container finished" podID="f43f9793-56f4-4898-a0ae-bfcc5d3218c2" containerID="354da87563e702d13b4ff62ba48386757cfcae6ad74e36b72d0294a505b3b68c" exitCode=0 Dec 02 11:53:46 crc kubenswrapper[4721]: I1202 11:53:46.531571 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f43f9793-56f4-4898-a0ae-bfcc5d3218c2","Type":"ContainerDied","Data":"354da87563e702d13b4ff62ba48386757cfcae6ad74e36b72d0294a505b3b68c"} Dec 02 11:53:46 crc kubenswrapper[4721]: I1202 11:53:46.537564 4721 generic.go:334] "Generic (PLEG): container finished" podID="91923222-03a2-43ce-8382-75953af7dcfc" containerID="cfac5c7ae0ada4ff9d421819065384f31525542d2d3adf625609eb6a8c5c3de6" exitCode=0 Dec 02 11:53:46 crc kubenswrapper[4721]: I1202 11:53:46.538365 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxl2w" event={"ID":"91923222-03a2-43ce-8382-75953af7dcfc","Type":"ContainerDied","Data":"cfac5c7ae0ada4ff9d421819065384f31525542d2d3adf625609eb6a8c5c3de6"} Dec 02 11:53:46 crc kubenswrapper[4721]: I1202 11:53:46.538401 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxl2w" event={"ID":"91923222-03a2-43ce-8382-75953af7dcfc","Type":"ContainerStarted","Data":"de7081aa2e06bb06058413a921a30a6ce49b878ab51c2281a7f81532bb52e43c"} Dec 02 11:53:46 crc kubenswrapper[4721]: I1202 11:53:46.985171 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 11:53:46 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Dec 02 11:53:46 crc kubenswrapper[4721]: [+]process-running ok Dec 02 11:53:46 crc kubenswrapper[4721]: healthz check failed Dec 02 11:53:46 crc kubenswrapper[4721]: I1202 11:53:46.985302 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.297688 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.298331 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.299888 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.300097 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.308178 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.388818 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cde3f168-2b2d-4bf1-b886-1020b346a8cb-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"cde3f168-2b2d-4bf1-b886-1020b346a8cb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.388935 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cde3f168-2b2d-4bf1-b886-1020b346a8cb-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"cde3f168-2b2d-4bf1-b886-1020b346a8cb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.490055 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cde3f168-2b2d-4bf1-b886-1020b346a8cb-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"cde3f168-2b2d-4bf1-b886-1020b346a8cb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.490161 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cde3f168-2b2d-4bf1-b886-1020b346a8cb-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"cde3f168-2b2d-4bf1-b886-1020b346a8cb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.490177 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cde3f168-2b2d-4bf1-b886-1020b346a8cb-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"cde3f168-2b2d-4bf1-b886-1020b346a8cb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.511847 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cde3f168-2b2d-4bf1-b886-1020b346a8cb-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"cde3f168-2b2d-4bf1-b886-1020b346a8cb\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.627697 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.799972 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.895169 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f43f9793-56f4-4898-a0ae-bfcc5d3218c2-kube-api-access\") pod \"f43f9793-56f4-4898-a0ae-bfcc5d3218c2\" (UID: \"f43f9793-56f4-4898-a0ae-bfcc5d3218c2\") " Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.895493 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f43f9793-56f4-4898-a0ae-bfcc5d3218c2-kubelet-dir\") pod \"f43f9793-56f4-4898-a0ae-bfcc5d3218c2\" (UID: \"f43f9793-56f4-4898-a0ae-bfcc5d3218c2\") " Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.895789 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f43f9793-56f4-4898-a0ae-bfcc5d3218c2-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f43f9793-56f4-4898-a0ae-bfcc5d3218c2" (UID: "f43f9793-56f4-4898-a0ae-bfcc5d3218c2"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.898929 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f43f9793-56f4-4898-a0ae-bfcc5d3218c2-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f43f9793-56f4-4898-a0ae-bfcc5d3218c2" (UID: "f43f9793-56f4-4898-a0ae-bfcc5d3218c2"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.986257 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 11:53:47 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Dec 02 11:53:47 crc kubenswrapper[4721]: [+]process-running ok Dec 02 11:53:47 crc kubenswrapper[4721]: healthz check failed Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.986319 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.996863 4721 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f43f9793-56f4-4898-a0ae-bfcc5d3218c2-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 11:53:47 crc kubenswrapper[4721]: I1202 11:53:47.996904 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f43f9793-56f4-4898-a0ae-bfcc5d3218c2-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 11:53:48 crc kubenswrapper[4721]: I1202 11:53:48.198002 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 11:53:48 crc kubenswrapper[4721]: W1202 11:53:48.236167 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podcde3f168_2b2d_4bf1_b886_1020b346a8cb.slice/crio-6a63230a99a804d3fe8f784fd7a762928b43d0dd8774c4758d96dd7eb39ddc28 WatchSource:0}: Error finding container 
Dec 02 11:53:48 crc kubenswrapper[4721]: I1202 11:53:48.569443 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"cde3f168-2b2d-4bf1-b886-1020b346a8cb","Type":"ContainerStarted","Data":"6a63230a99a804d3fe8f784fd7a762928b43d0dd8774c4758d96dd7eb39ddc28"}
Dec 02 11:53:48 crc kubenswrapper[4721]: I1202 11:53:48.573421 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f43f9793-56f4-4898-a0ae-bfcc5d3218c2","Type":"ContainerDied","Data":"331e21b51c74cdcb75ca0899583fbc41cb81e72cb5b78b966e6ec0a6255600b3"}
Dec 02 11:53:48 crc kubenswrapper[4721]: I1202 11:53:48.573472 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="331e21b51c74cdcb75ca0899583fbc41cb81e72cb5b78b966e6ec0a6255600b3"
Dec 02 11:53:48 crc kubenswrapper[4721]: I1202 11:53:48.573482 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 11:53:48 crc kubenswrapper[4721]: I1202 11:53:48.985247 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 11:53:48 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Dec 02 11:53:48 crc kubenswrapper[4721]: [+]process-running ok
Dec 02 11:53:48 crc kubenswrapper[4721]: healthz check failed
Dec 02 11:53:48 crc kubenswrapper[4721]: I1202 11:53:48.985325 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 11:53:49 crc kubenswrapper[4721]: I1202 11:53:49.593099 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"cde3f168-2b2d-4bf1-b886-1020b346a8cb","Type":"ContainerStarted","Data":"c5725b30c7a58a9aa05227b30fec8caf42aff45dc54e6f1d5b99c1b3045a3030"}
Dec 02 11:53:49 crc kubenswrapper[4721]: I1202 11:53:49.609809 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.609789363 podStartE2EDuration="2.609789363s" podCreationTimestamp="2025-12-02 11:53:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:53:49.605387834 +0000 UTC m=+159.343938173" watchObservedRunningTime="2025-12-02 11:53:49.609789363 +0000 UTC m=+159.348339712"
Dec 02 11:53:49 crc kubenswrapper[4721]: I1202 11:53:49.984965 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 11:53:49 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Dec 02 11:53:49 crc kubenswrapper[4721]: [+]process-running ok
Dec 02 11:53:49 crc kubenswrapper[4721]: healthz check failed
Dec 02 11:53:49 crc kubenswrapper[4721]: I1202 11:53:49.985023 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 11:53:50 crc kubenswrapper[4721]: I1202 11:53:50.057058 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-n2cvg"
Dec 02 11:53:50 crc kubenswrapper[4721]: I1202 11:53:50.601685 4721 generic.go:334] "Generic (PLEG): container finished" podID="cde3f168-2b2d-4bf1-b886-1020b346a8cb" containerID="c5725b30c7a58a9aa05227b30fec8caf42aff45dc54e6f1d5b99c1b3045a3030" exitCode=0
Dec 02 11:53:50 crc kubenswrapper[4721]: I1202 11:53:50.601732 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"cde3f168-2b2d-4bf1-b886-1020b346a8cb","Type":"ContainerDied","Data":"c5725b30c7a58a9aa05227b30fec8caf42aff45dc54e6f1d5b99c1b3045a3030"}
Dec 02 11:53:50 crc kubenswrapper[4721]: I1202 11:53:50.985274 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 11:53:50 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Dec 02 11:53:50 crc kubenswrapper[4721]: [+]process-running ok
Dec 02 11:53:50 crc kubenswrapper[4721]: healthz check failed
Dec 02 11:53:50 crc kubenswrapper[4721]: I1202 11:53:50.985593 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 11:53:51 crc kubenswrapper[4721]: I1202 11:53:51.985357 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 11:53:51 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Dec 02 11:53:51 crc kubenswrapper[4721]: [+]process-running ok
Dec 02 11:53:51 crc kubenswrapper[4721]: healthz check failed
Dec 02 11:53:51 crc kubenswrapper[4721]: I1202 11:53:51.985436 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 11:53:52 crc kubenswrapper[4721]: I1202 11:53:52.673835 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs\") pod \"network-metrics-daemon-qf9jp\" (UID: \"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\") " pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:52 crc kubenswrapper[4721]: I1202 11:53:52.694462 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1b39d16-c46b-40dc-a2a6-600f96eb93f7-metrics-certs\") pod \"network-metrics-daemon-qf9jp\" (UID: \"c1b39d16-c46b-40dc-a2a6-600f96eb93f7\") " pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:52 crc kubenswrapper[4721]: I1202 11:53:52.957926 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qf9jp"
Dec 02 11:53:52 crc kubenswrapper[4721]: I1202 11:53:52.984300 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 11:53:52 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Dec 02 11:53:52 crc kubenswrapper[4721]: [+]process-running ok
Dec 02 11:53:52 crc kubenswrapper[4721]: healthz check failed
Dec 02 11:53:52 crc kubenswrapper[4721]: I1202 11:53:52.984381 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 11:53:53 crc kubenswrapper[4721]: I1202 11:53:53.986887 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-wtlwb"
Dec 02 11:53:53 crc kubenswrapper[4721]: I1202 11:53:53.990511 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-wtlwb"
Dec 02 11:53:54 crc kubenswrapper[4721]: I1202 11:53:54.171564 4721 patch_prober.go:28] interesting pod/console-f9d7485db-tl5t9 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body=
Dec 02 11:53:54 crc kubenswrapper[4721]: I1202 11:53:54.171989 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-tl5t9" podUID="54beb89c-d2c3-42c8-a186-465ebcbb4e4b" containerName="console" probeResult="failure" output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused"
Dec 02 11:53:54 crc kubenswrapper[4721]: I1202 11:53:54.295552 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-qx7nr"
Dec 02 11:53:54 crc kubenswrapper[4721]: I1202 11:53:54.861816 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 02 11:53:55 crc kubenswrapper[4721]: I1202 11:53:55.002757 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cde3f168-2b2d-4bf1-b886-1020b346a8cb-kube-api-access\") pod \"cde3f168-2b2d-4bf1-b886-1020b346a8cb\" (UID: \"cde3f168-2b2d-4bf1-b886-1020b346a8cb\") "
Dec 02 11:53:55 crc kubenswrapper[4721]: I1202 11:53:55.002857 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cde3f168-2b2d-4bf1-b886-1020b346a8cb-kubelet-dir\") pod \"cde3f168-2b2d-4bf1-b886-1020b346a8cb\" (UID: \"cde3f168-2b2d-4bf1-b886-1020b346a8cb\") "
Dec 02 11:53:55 crc kubenswrapper[4721]: I1202 11:53:55.003015 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cde3f168-2b2d-4bf1-b886-1020b346a8cb-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "cde3f168-2b2d-4bf1-b886-1020b346a8cb" (UID: "cde3f168-2b2d-4bf1-b886-1020b346a8cb"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 11:53:55 crc kubenswrapper[4721]: I1202 11:53:55.003265 4721 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cde3f168-2b2d-4bf1-b886-1020b346a8cb-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 02 11:53:55 crc kubenswrapper[4721]: I1202 11:53:55.007674 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cde3f168-2b2d-4bf1-b886-1020b346a8cb-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "cde3f168-2b2d-4bf1-b886-1020b346a8cb" (UID: "cde3f168-2b2d-4bf1-b886-1020b346a8cb"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 11:53:55 crc kubenswrapper[4721]: I1202 11:53:55.104327 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cde3f168-2b2d-4bf1-b886-1020b346a8cb-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 02 11:53:55 crc kubenswrapper[4721]: I1202 11:53:55.640237 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"cde3f168-2b2d-4bf1-b886-1020b346a8cb","Type":"ContainerDied","Data":"6a63230a99a804d3fe8f784fd7a762928b43d0dd8774c4758d96dd7eb39ddc28"}
Dec 02 11:53:55 crc kubenswrapper[4721]: I1202 11:53:55.640291 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a63230a99a804d3fe8f784fd7a762928b43d0dd8774c4758d96dd7eb39ddc28"
Dec 02 11:53:55 crc kubenswrapper[4721]: I1202 11:53:55.640387 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 02 11:53:55 crc kubenswrapper[4721]: I1202 11:53:55.703468 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 11:53:55 crc kubenswrapper[4721]: I1202 11:53:55.703583 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 11:54:01 crc kubenswrapper[4721]: I1202 11:54:01.493962 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s"
Dec 02 11:54:04 crc kubenswrapper[4721]: I1202 11:54:04.174905 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-tl5t9"
Dec 02 11:54:04 crc kubenswrapper[4721]: I1202 11:54:04.181942 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-tl5t9"
Dec 02 11:54:14 crc kubenswrapper[4721]: I1202 11:54:14.958705 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-v6mzl"
Dec 02 11:54:17 crc kubenswrapper[4721]: E1202 11:54:17.350512 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 02 11:54:17 crc kubenswrapper[4721]: E1202 11:54:17.351011 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bpxgx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-4k25r_openshift-marketplace(f1e35ec2-7693-4103-97e1-3304b8ac93f4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 11:54:17 crc kubenswrapper[4721]: E1202 11:54:17.352194 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-4k25r" podUID="f1e35ec2-7693-4103-97e1-3304b8ac93f4" Dec 02 11:54:18 crc kubenswrapper[4721]: I1202 11:54:18.068632 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 11:54:18 crc kubenswrapper[4721]: E1202 11:54:18.540233 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-4k25r" podUID="f1e35ec2-7693-4103-97e1-3304b8ac93f4" Dec 02 11:54:18 crc kubenswrapper[4721]: E1202 11:54:18.600884 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 02 11:54:18 crc kubenswrapper[4721]: E1202 11:54:18.601033 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6nc77,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-69prw_openshift-marketplace(e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 11:54:18 crc kubenswrapper[4721]: E1202 11:54:18.602207 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-69prw" podUID="e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" Dec 02 11:54:18 crc kubenswrapper[4721]: E1202 11:54:18.657231 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 02 11:54:18 crc kubenswrapper[4721]: E1202 11:54:18.657437 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
Dec 02 11:54:18 crc kubenswrapper[4721]: E1202 11:54:18.658614 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-wpd4r" podUID="9610e93f-3f1c-4f61-a171-2e9739620178"
Dec 02 11:54:19 crc kubenswrapper[4721]: E1202 11:54:19.667714 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-69prw" podUID="e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d"
Dec 02 11:54:19 crc kubenswrapper[4721]: E1202 11:54:19.669931 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-wpd4r" podUID="9610e93f-3f1c-4f61-a171-2e9739620178"
Dec 02 11:54:19 crc kubenswrapper[4721]: E1202 11:54:19.748712 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 02 11:54:19 crc kubenswrapper[4721]: E1202 11:54:19.749132 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bglfp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-4qzrw_openshift-marketplace(5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 02 11:54:19 crc kubenswrapper[4721]: E1202 11:54:19.751708 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 02 11:54:19 crc kubenswrapper[4721]: E1202 11:54:19.751803 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d84nk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-7l4c7_openshift-marketplace(4dddd92c-299e-49fa-8c1b-e68c31f2a743): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 02 11:54:19 crc kubenswrapper[4721]: E1202 11:54:19.751871 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-4qzrw" podUID="5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44"
Dec 02 11:54:19 crc kubenswrapper[4721]: E1202 11:54:19.753056 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-7l4c7" podUID="4dddd92c-299e-49fa-8c1b-e68c31f2a743"
Dec 02 11:54:19 crc kubenswrapper[4721]: E1202 11:54:19.769885 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-7l4c7" podUID="4dddd92c-299e-49fa-8c1b-e68c31f2a743"
Dec 02 11:54:19 crc kubenswrapper[4721]: E1202 11:54:19.770142 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-4qzrw" podUID="5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44"
Dec 02 11:54:19 crc kubenswrapper[4721]: E1202 11:54:19.778079 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 02 11:54:19 crc kubenswrapper[4721]: E1202 11:54:19.778267 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zpkpf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-8fw86_openshift-marketplace(94f45fa8-3517-4376-b5c0-8d6cf0254027): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 02 11:54:19 crc kubenswrapper[4721]: E1202 11:54:19.780104 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-8fw86" podUID="94f45fa8-3517-4376-b5c0-8d6cf0254027"
Dec 02 11:54:20 crc kubenswrapper[4721]: I1202 11:54:20.069063 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-qf9jp"]
Dec 02 11:54:20 crc kubenswrapper[4721]: W1202 11:54:20.073631 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1b39d16_c46b_40dc_a2a6_600f96eb93f7.slice/crio-849e7bc34e26f8cfc54b89979b476219d941ff44c7d75a111ae83f2c5ada46c8 WatchSource:0}: Error finding container 849e7bc34e26f8cfc54b89979b476219d941ff44c7d75a111ae83f2c5ada46c8: Status 404 returned error can't find the container with id 849e7bc34e26f8cfc54b89979b476219d941ff44c7d75a111ae83f2c5ada46c8
Dec 02 11:54:20 crc kubenswrapper[4721]: I1202 11:54:20.778826 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" event={"ID":"c1b39d16-c46b-40dc-a2a6-600f96eb93f7","Type":"ContainerStarted","Data":"6002b3502a53f600842e143d6be43001b2be7f0ef22d70e71643068da69d761c"}
Dec 02 11:54:20 crc kubenswrapper[4721]: I1202 11:54:20.778889 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" event={"ID":"c1b39d16-c46b-40dc-a2a6-600f96eb93f7","Type":"ContainerStarted","Data":"b533d8c5bdb615c9f36b925bd9c1f604a8b8f37a7ccf7d9aa4972150b0a90c23"}
Dec 02 11:54:20 crc kubenswrapper[4721]: I1202 11:54:20.778905 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-qf9jp" event={"ID":"c1b39d16-c46b-40dc-a2a6-600f96eb93f7","Type":"ContainerStarted","Data":"849e7bc34e26f8cfc54b89979b476219d941ff44c7d75a111ae83f2c5ada46c8"}
event={"ID":"c1b39d16-c46b-40dc-a2a6-600f96eb93f7","Type":"ContainerStarted","Data":"849e7bc34e26f8cfc54b89979b476219d941ff44c7d75a111ae83f2c5ada46c8"} Dec 02 11:54:20 crc kubenswrapper[4721]: I1202 11:54:20.780856 4721 generic.go:334] "Generic (PLEG): container finished" podID="91923222-03a2-43ce-8382-75953af7dcfc" containerID="9c24f7bc98451e04099ce8d62e6b4efa05fd96971c2ec368eafcf3ffe259113c" exitCode=0 Dec 02 11:54:20 crc kubenswrapper[4721]: I1202 11:54:20.780954 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxl2w" event={"ID":"91923222-03a2-43ce-8382-75953af7dcfc","Type":"ContainerDied","Data":"9c24f7bc98451e04099ce8d62e6b4efa05fd96971c2ec368eafcf3ffe259113c"} Dec 02 11:54:20 crc kubenswrapper[4721]: I1202 11:54:20.784465 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-24l7f" event={"ID":"9a704275-5d23-4f38-805d-e0f8be9b50dc","Type":"ContainerDied","Data":"a7b83b3ca582c1246dfce09664809edc19a230a255bea275641edf7d02009d23"} Dec 02 11:54:20 crc kubenswrapper[4721]: I1202 11:54:20.784451 4721 generic.go:334] "Generic (PLEG): container finished" podID="9a704275-5d23-4f38-805d-e0f8be9b50dc" containerID="a7b83b3ca582c1246dfce09664809edc19a230a255bea275641edf7d02009d23" exitCode=0 Dec 02 11:54:20 crc kubenswrapper[4721]: E1202 11:54:20.788048 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-8fw86" podUID="94f45fa8-3517-4376-b5c0-8d6cf0254027" Dec 02 11:54:20 crc kubenswrapper[4721]: I1202 11:54:20.797157 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-qf9jp" podStartSLOduration=169.797134734 podStartE2EDuration="2m49.797134734s" podCreationTimestamp="2025-12-02 11:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:54:20.794551521 +0000 UTC m=+190.533101870" watchObservedRunningTime="2025-12-02 11:54:20.797134734 +0000 UTC m=+190.535685083" Dec 02 11:54:21 crc kubenswrapper[4721]: I1202 11:54:21.794926 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxl2w" event={"ID":"91923222-03a2-43ce-8382-75953af7dcfc","Type":"ContainerStarted","Data":"364f2ea54785a957a51ade5d06b3da8c4e1f056e042f25cca1e034457d4cd5f5"} Dec 02 11:54:21 crc kubenswrapper[4721]: I1202 11:54:21.796586 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-24l7f" event={"ID":"9a704275-5d23-4f38-805d-e0f8be9b50dc","Type":"ContainerStarted","Data":"2bd09f7eacf82d6f397f3ed03f094c38f9a4881e9feef857912e824ba14450f0"} Dec 02 11:54:21 crc kubenswrapper[4721]: I1202 11:54:21.812516 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zxl2w" podStartSLOduration=3.130446951 podStartE2EDuration="37.81250198s" podCreationTimestamp="2025-12-02 11:53:44 +0000 UTC" firstStartedPulling="2025-12-02 11:53:46.5407972 +0000 UTC m=+156.279347539" lastFinishedPulling="2025-12-02 11:54:21.222852218 +0000 UTC m=+190.961402568" observedRunningTime="2025-12-02 11:54:21.812074486 +0000 UTC m=+191.550624835" watchObservedRunningTime="2025-12-02 11:54:21.81250198 +0000 UTC m=+191.551052319" Dec 02 
11:54:21 crc kubenswrapper[4721]: I1202 11:54:21.830952 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-24l7f" podStartSLOduration=1.928305125 podStartE2EDuration="37.830933062s" podCreationTimestamp="2025-12-02 11:53:44 +0000 UTC" firstStartedPulling="2025-12-02 11:53:45.50699163 +0000 UTC m=+155.245541979" lastFinishedPulling="2025-12-02 11:54:21.409619567 +0000 UTC m=+191.148169916" observedRunningTime="2025-12-02 11:54:21.826265595 +0000 UTC m=+191.564815944" watchObservedRunningTime="2025-12-02 11:54:21.830933062 +0000 UTC m=+191.569483421" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.259471 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 11:54:23 crc kubenswrapper[4721]: E1202 11:54:23.260355 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cde3f168-2b2d-4bf1-b886-1020b346a8cb" containerName="pruner" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.260371 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="cde3f168-2b2d-4bf1-b886-1020b346a8cb" containerName="pruner" Dec 02 11:54:23 crc kubenswrapper[4721]: E1202 11:54:23.260402 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f43f9793-56f4-4898-a0ae-bfcc5d3218c2" containerName="pruner" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.260410 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f43f9793-56f4-4898-a0ae-bfcc5d3218c2" containerName="pruner" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.260519 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="cde3f168-2b2d-4bf1-b886-1020b346a8cb" containerName="pruner" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.260536 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f43f9793-56f4-4898-a0ae-bfcc5d3218c2" containerName="pruner" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.261015 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.263429 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.263741 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.268392 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.417133 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3d4c9a8d-1389-48b3-8393-c08cacb9f639-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"3d4c9a8d-1389-48b3-8393-c08cacb9f639\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.417198 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3d4c9a8d-1389-48b3-8393-c08cacb9f639-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"3d4c9a8d-1389-48b3-8393-c08cacb9f639\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.517882 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3d4c9a8d-1389-48b3-8393-c08cacb9f639-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"3d4c9a8d-1389-48b3-8393-c08cacb9f639\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.517983 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3d4c9a8d-1389-48b3-8393-c08cacb9f639-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"3d4c9a8d-1389-48b3-8393-c08cacb9f639\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.518369 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3d4c9a8d-1389-48b3-8393-c08cacb9f639-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"3d4c9a8d-1389-48b3-8393-c08cacb9f639\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.536590 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3d4c9a8d-1389-48b3-8393-c08cacb9f639-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"3d4c9a8d-1389-48b3-8393-c08cacb9f639\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.594599 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 11:54:23 crc kubenswrapper[4721]: I1202 11:54:23.804834 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 11:54:23 crc kubenswrapper[4721]: W1202 11:54:23.829694 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod3d4c9a8d_1389_48b3_8393_c08cacb9f639.slice/crio-9e9604b4b0861bfc74e18ae7f0187c7b92c4b6f4ccaed504dcc525846ed55b3d WatchSource:0}: Error finding container 9e9604b4b0861bfc74e18ae7f0187c7b92c4b6f4ccaed504dcc525846ed55b3d: Status 404 returned error can't find the container with id 9e9604b4b0861bfc74e18ae7f0187c7b92c4b6f4ccaed504dcc525846ed55b3d Dec 02 11:54:24 crc kubenswrapper[4721]: I1202 11:54:24.830826 4721 generic.go:334] "Generic (PLEG): container finished" podID="3d4c9a8d-1389-48b3-8393-c08cacb9f639" containerID="e994513a134c27dc545b0436815d687fa978d4359bdc8efa75840f7c95d262c5" exitCode=0 Dec 02 11:54:24 crc kubenswrapper[4721]: I1202 11:54:24.830939 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"3d4c9a8d-1389-48b3-8393-c08cacb9f639","Type":"ContainerDied","Data":"e994513a134c27dc545b0436815d687fa978d4359bdc8efa75840f7c95d262c5"} Dec 02 11:54:24 crc kubenswrapper[4721]: I1202 11:54:24.831221 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"3d4c9a8d-1389-48b3-8393-c08cacb9f639","Type":"ContainerStarted","Data":"9e9604b4b0861bfc74e18ae7f0187c7b92c4b6f4ccaed504dcc525846ed55b3d"} Dec 02 11:54:24 crc kubenswrapper[4721]: I1202 11:54:24.883034 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-24l7f" Dec 02 11:54:24 crc kubenswrapper[4721]: I1202 11:54:24.883261 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-24l7f" Dec 02 11:54:25 crc kubenswrapper[4721]: I1202 11:54:25.280888 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:54:25 crc kubenswrapper[4721]: I1202 11:54:25.281337 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:54:25 crc kubenswrapper[4721]: I1202 11:54:25.703206 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 11:54:25 crc kubenswrapper[4721]: I1202 11:54:25.703264 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 11:54:25 crc kubenswrapper[4721]: I1202 11:54:25.996073 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-24l7f" podUID="9a704275-5d23-4f38-805d-e0f8be9b50dc" containerName="registry-server" probeResult="failure" output=< Dec 02 11:54:25 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Dec 02 11:54:25 crc 
kubenswrapper[4721]: > Dec 02 11:54:26 crc kubenswrapper[4721]: I1202 11:54:26.079086 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 11:54:26 crc kubenswrapper[4721]: I1202 11:54:26.155431 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3d4c9a8d-1389-48b3-8393-c08cacb9f639-kubelet-dir\") pod \"3d4c9a8d-1389-48b3-8393-c08cacb9f639\" (UID: \"3d4c9a8d-1389-48b3-8393-c08cacb9f639\") " Dec 02 11:54:26 crc kubenswrapper[4721]: I1202 11:54:26.155527 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d4c9a8d-1389-48b3-8393-c08cacb9f639-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "3d4c9a8d-1389-48b3-8393-c08cacb9f639" (UID: "3d4c9a8d-1389-48b3-8393-c08cacb9f639"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:54:26 crc kubenswrapper[4721]: I1202 11:54:26.155587 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3d4c9a8d-1389-48b3-8393-c08cacb9f639-kube-api-access\") pod \"3d4c9a8d-1389-48b3-8393-c08cacb9f639\" (UID: \"3d4c9a8d-1389-48b3-8393-c08cacb9f639\") " Dec 02 11:54:26 crc kubenswrapper[4721]: I1202 11:54:26.155903 4721 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3d4c9a8d-1389-48b3-8393-c08cacb9f639-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:26 crc kubenswrapper[4721]: I1202 11:54:26.165570 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d4c9a8d-1389-48b3-8393-c08cacb9f639-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "3d4c9a8d-1389-48b3-8393-c08cacb9f639" (UID: "3d4c9a8d-1389-48b3-8393-c08cacb9f639"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:54:26 crc kubenswrapper[4721]: I1202 11:54:26.257293 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3d4c9a8d-1389-48b3-8393-c08cacb9f639-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:26 crc kubenswrapper[4721]: I1202 11:54:26.354915 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zxl2w" podUID="91923222-03a2-43ce-8382-75953af7dcfc" containerName="registry-server" probeResult="failure" output=< Dec 02 11:54:26 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s Dec 02 11:54:26 crc kubenswrapper[4721]: > Dec 02 11:54:26 crc kubenswrapper[4721]: I1202 11:54:26.851418 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 11:54:26 crc kubenswrapper[4721]: I1202 11:54:26.858027 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"3d4c9a8d-1389-48b3-8393-c08cacb9f639","Type":"ContainerDied","Data":"9e9604b4b0861bfc74e18ae7f0187c7b92c4b6f4ccaed504dcc525846ed55b3d"} Dec 02 11:54:26 crc kubenswrapper[4721]: I1202 11:54:26.858066 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e9604b4b0861bfc74e18ae7f0187c7b92c4b6f4ccaed504dcc525846ed55b3d" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.055748 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 02 11:54:28 crc kubenswrapper[4721]: E1202 11:54:28.055951 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d4c9a8d-1389-48b3-8393-c08cacb9f639" containerName="pruner" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.055962 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d4c9a8d-1389-48b3-8393-c08cacb9f639" containerName="pruner" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.056051 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d4c9a8d-1389-48b3-8393-c08cacb9f639" containerName="pruner" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.056387 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.059085 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.059551 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.071513 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.180525 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/855a31ab-b04f-4907-be30-115efd42a84e-kube-api-access\") pod \"installer-9-crc\" (UID: \"855a31ab-b04f-4907-be30-115efd42a84e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.180574 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/855a31ab-b04f-4907-be30-115efd42a84e-kubelet-dir\") pod \"installer-9-crc\" (UID: \"855a31ab-b04f-4907-be30-115efd42a84e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.180634 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/855a31ab-b04f-4907-be30-115efd42a84e-var-lock\") pod \"installer-9-crc\" (UID: \"855a31ab-b04f-4907-be30-115efd42a84e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.282197 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/855a31ab-b04f-4907-be30-115efd42a84e-var-lock\") pod \"installer-9-crc\" (UID: 
\"855a31ab-b04f-4907-be30-115efd42a84e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.282262 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/855a31ab-b04f-4907-be30-115efd42a84e-kube-api-access\") pod \"installer-9-crc\" (UID: \"855a31ab-b04f-4907-be30-115efd42a84e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.282292 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/855a31ab-b04f-4907-be30-115efd42a84e-kubelet-dir\") pod \"installer-9-crc\" (UID: \"855a31ab-b04f-4907-be30-115efd42a84e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.282367 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/855a31ab-b04f-4907-be30-115efd42a84e-kubelet-dir\") pod \"installer-9-crc\" (UID: \"855a31ab-b04f-4907-be30-115efd42a84e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.282361 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/855a31ab-b04f-4907-be30-115efd42a84e-var-lock\") pod \"installer-9-crc\" (UID: \"855a31ab-b04f-4907-be30-115efd42a84e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.298862 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/855a31ab-b04f-4907-be30-115efd42a84e-kube-api-access\") pod \"installer-9-crc\" (UID: \"855a31ab-b04f-4907-be30-115efd42a84e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.380116 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.556594 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 02 11:54:28 crc kubenswrapper[4721]: I1202 11:54:28.863035 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"855a31ab-b04f-4907-be30-115efd42a84e","Type":"ContainerStarted","Data":"57a9571429eeaf66cc9387c7d2880c276e210d6cccba4e63624130a247621d9e"} Dec 02 11:54:29 crc kubenswrapper[4721]: I1202 11:54:29.868660 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"855a31ab-b04f-4907-be30-115efd42a84e","Type":"ContainerStarted","Data":"0ec70084d6befc4cbf2d97a3805cd46931b6d600b74e64701bf74bba019aa92f"} Dec 02 11:54:29 crc kubenswrapper[4721]: I1202 11:54:29.881952 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=1.8819335480000001 podStartE2EDuration="1.881933548s" podCreationTimestamp="2025-12-02 11:54:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:54:29.879418861 +0000 UTC m=+199.617969210" watchObservedRunningTime="2025-12-02 11:54:29.881933548 +0000 UTC m=+199.620483897" Dec 02 11:54:34 crc kubenswrapper[4721]: I1202 11:54:34.026660 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wb9r7"] Dec 02 11:54:34 crc kubenswrapper[4721]: I1202 11:54:34.931860 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-24l7f" Dec 02 11:54:34 crc kubenswrapper[4721]: I1202 11:54:34.972540 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-24l7f" Dec 02 11:54:35 crc kubenswrapper[4721]: I1202 11:54:35.328321 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:54:35 crc kubenswrapper[4721]: I1202 11:54:35.388970 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.438324 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fg2ch"] Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.438782 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" podUID="c02678af-0ebc-4689-ad5c-1300cb95439e" containerName="controller-manager" containerID="cri-o://6ef1462510924be5e6b16aac7a5cea0c7ec789fdc3cac653d29440f9b28ce12e" gracePeriod=30 Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.537473 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl"] Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.537681 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" podUID="1f4e7fae-d9a4-4f20-8658-4544860eaced" containerName="route-controller-manager" 
containerID="cri-o://be86e2e22779a32b69dd75add2f84c7d7f50c0005c3c4fdfaeef37b9a1518daa" gracePeriod=30 Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.824613 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.928910 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.994383 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bz9c5\" (UniqueName: \"kubernetes.io/projected/c02678af-0ebc-4689-ad5c-1300cb95439e-kube-api-access-bz9c5\") pod \"c02678af-0ebc-4689-ad5c-1300cb95439e\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.994606 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c02678af-0ebc-4689-ad5c-1300cb95439e-serving-cert\") pod \"c02678af-0ebc-4689-ad5c-1300cb95439e\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.994714 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f4e7fae-d9a4-4f20-8658-4544860eaced-client-ca\") pod \"1f4e7fae-d9a4-4f20-8658-4544860eaced\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.994807 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-proxy-ca-bundles\") pod \"c02678af-0ebc-4689-ad5c-1300cb95439e\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.994913 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m96xx\" (UniqueName: \"kubernetes.io/projected/1f4e7fae-d9a4-4f20-8658-4544860eaced-kube-api-access-m96xx\") pod \"1f4e7fae-d9a4-4f20-8658-4544860eaced\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.995000 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-client-ca\") pod \"c02678af-0ebc-4689-ad5c-1300cb95439e\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.995102 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-config\") pod \"c02678af-0ebc-4689-ad5c-1300cb95439e\" (UID: \"c02678af-0ebc-4689-ad5c-1300cb95439e\") " Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.995191 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f4e7fae-d9a4-4f20-8658-4544860eaced-config\") pod \"1f4e7fae-d9a4-4f20-8658-4544860eaced\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.995798 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c02678af-0ebc-4689-ad5c-1300cb95439e" (UID: "c02678af-0ebc-4689-ad5c-1300cb95439e"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.995840 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-client-ca" (OuterVolumeSpecName: "client-ca") pod "c02678af-0ebc-4689-ad5c-1300cb95439e" (UID: "c02678af-0ebc-4689-ad5c-1300cb95439e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.995928 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-config" (OuterVolumeSpecName: "config") pod "c02678af-0ebc-4689-ad5c-1300cb95439e" (UID: "c02678af-0ebc-4689-ad5c-1300cb95439e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.996252 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f4e7fae-d9a4-4f20-8658-4544860eaced-config" (OuterVolumeSpecName: "config") pod "1f4e7fae-d9a4-4f20-8658-4544860eaced" (UID: "1f4e7fae-d9a4-4f20-8658-4544860eaced"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:54:36 crc kubenswrapper[4721]: I1202 11:54:36.996782 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f4e7fae-d9a4-4f20-8658-4544860eaced-client-ca" (OuterVolumeSpecName: "client-ca") pod "1f4e7fae-d9a4-4f20-8658-4544860eaced" (UID: "1f4e7fae-d9a4-4f20-8658-4544860eaced"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.000088 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f4e7fae-d9a4-4f20-8658-4544860eaced-kube-api-access-m96xx" (OuterVolumeSpecName: "kube-api-access-m96xx") pod "1f4e7fae-d9a4-4f20-8658-4544860eaced" (UID: "1f4e7fae-d9a4-4f20-8658-4544860eaced"). InnerVolumeSpecName "kube-api-access-m96xx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.000315 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c02678af-0ebc-4689-ad5c-1300cb95439e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c02678af-0ebc-4689-ad5c-1300cb95439e" (UID: "c02678af-0ebc-4689-ad5c-1300cb95439e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.000678 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c02678af-0ebc-4689-ad5c-1300cb95439e-kube-api-access-bz9c5" (OuterVolumeSpecName: "kube-api-access-bz9c5") pod "c02678af-0ebc-4689-ad5c-1300cb95439e" (UID: "c02678af-0ebc-4689-ad5c-1300cb95439e"). InnerVolumeSpecName "kube-api-access-bz9c5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.013223 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7l4c7" event={"ID":"4dddd92c-299e-49fa-8c1b-e68c31f2a743","Type":"ContainerDied","Data":"60324830e7b1e411a913d1eae277bac39ff38fc28b5cddadc6bbb2cf94bcf495"} Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.013244 4721 generic.go:334] "Generic (PLEG): container finished" podID="4dddd92c-299e-49fa-8c1b-e68c31f2a743" containerID="60324830e7b1e411a913d1eae277bac39ff38fc28b5cddadc6bbb2cf94bcf495" exitCode=0 Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.016583 4721 generic.go:334] "Generic (PLEG): container finished" podID="c02678af-0ebc-4689-ad5c-1300cb95439e" containerID="6ef1462510924be5e6b16aac7a5cea0c7ec789fdc3cac653d29440f9b28ce12e" exitCode=0 Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.016679 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.017068 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" event={"ID":"c02678af-0ebc-4689-ad5c-1300cb95439e","Type":"ContainerDied","Data":"6ef1462510924be5e6b16aac7a5cea0c7ec789fdc3cac653d29440f9b28ce12e"} Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.017104 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-fg2ch" event={"ID":"c02678af-0ebc-4689-ad5c-1300cb95439e","Type":"ContainerDied","Data":"5a73a4f9def9655d1ab4c09791dc006bffee5dc63ca09b1e45747dfe726eee45"} Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.017121 4721 scope.go:117] "RemoveContainer" containerID="6ef1462510924be5e6b16aac7a5cea0c7ec789fdc3cac653d29440f9b28ce12e" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.024750 4721 generic.go:334] "Generic (PLEG): container finished" podID="5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44" containerID="1a0341d448b0dcdedbd73e4b08e0c1023e2b0e0358acbe02b144494022be580d" exitCode=0 Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.024953 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qzrw" event={"ID":"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44","Type":"ContainerDied","Data":"1a0341d448b0dcdedbd73e4b08e0c1023e2b0e0358acbe02b144494022be580d"} Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.028008 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wpd4r" event={"ID":"9610e93f-3f1c-4f61-a171-2e9739620178","Type":"ContainerStarted","Data":"e4f5e3b41638ff1217704c220cacdb3f5dd6c0c516ac13d6613d7b3ff3b69c8b"} Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.033611 4721 generic.go:334] "Generic (PLEG): container finished" podID="1f4e7fae-d9a4-4f20-8658-4544860eaced" containerID="be86e2e22779a32b69dd75add2f84c7d7f50c0005c3c4fdfaeef37b9a1518daa" exitCode=0 Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.033639 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" event={"ID":"1f4e7fae-d9a4-4f20-8658-4544860eaced","Type":"ContainerDied","Data":"be86e2e22779a32b69dd75add2f84c7d7f50c0005c3c4fdfaeef37b9a1518daa"} Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.033661 4721 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" event={"ID":"1f4e7fae-d9a4-4f20-8658-4544860eaced","Type":"ContainerDied","Data":"9b98fac2cee8c9550eacc6af151eb41f881d0d750ac0ebf9f30b82e55e6bbbdf"} Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.033712 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.042490 4721 scope.go:117] "RemoveContainer" containerID="6ef1462510924be5e6b16aac7a5cea0c7ec789fdc3cac653d29440f9b28ce12e" Dec 02 11:54:37 crc kubenswrapper[4721]: E1202 11:54:37.043734 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ef1462510924be5e6b16aac7a5cea0c7ec789fdc3cac653d29440f9b28ce12e\": container with ID starting with 6ef1462510924be5e6b16aac7a5cea0c7ec789fdc3cac653d29440f9b28ce12e not found: ID does not exist" containerID="6ef1462510924be5e6b16aac7a5cea0c7ec789fdc3cac653d29440f9b28ce12e" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.043827 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ef1462510924be5e6b16aac7a5cea0c7ec789fdc3cac653d29440f9b28ce12e"} err="failed to get container status \"6ef1462510924be5e6b16aac7a5cea0c7ec789fdc3cac653d29440f9b28ce12e\": rpc error: code = NotFound desc = could not find container \"6ef1462510924be5e6b16aac7a5cea0c7ec789fdc3cac653d29440f9b28ce12e\": container with ID starting with 6ef1462510924be5e6b16aac7a5cea0c7ec789fdc3cac653d29440f9b28ce12e not found: ID does not exist" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.043891 4721 scope.go:117] "RemoveContainer" containerID="be86e2e22779a32b69dd75add2f84c7d7f50c0005c3c4fdfaeef37b9a1518daa" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.072179 4721 scope.go:117] "RemoveContainer" containerID="be86e2e22779a32b69dd75add2f84c7d7f50c0005c3c4fdfaeef37b9a1518daa" Dec 02 11:54:37 crc kubenswrapper[4721]: E1202 11:54:37.072582 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be86e2e22779a32b69dd75add2f84c7d7f50c0005c3c4fdfaeef37b9a1518daa\": container with ID starting with be86e2e22779a32b69dd75add2f84c7d7f50c0005c3c4fdfaeef37b9a1518daa not found: ID does not exist" containerID="be86e2e22779a32b69dd75add2f84c7d7f50c0005c3c4fdfaeef37b9a1518daa" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.072737 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be86e2e22779a32b69dd75add2f84c7d7f50c0005c3c4fdfaeef37b9a1518daa"} err="failed to get container status \"be86e2e22779a32b69dd75add2f84c7d7f50c0005c3c4fdfaeef37b9a1518daa\": rpc error: code = NotFound desc = could not find container \"be86e2e22779a32b69dd75add2f84c7d7f50c0005c3c4fdfaeef37b9a1518daa\": container with ID starting with be86e2e22779a32b69dd75add2f84c7d7f50c0005c3c4fdfaeef37b9a1518daa not found: ID does not exist" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.095762 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f4e7fae-d9a4-4f20-8658-4544860eaced-serving-cert\") pod \"1f4e7fae-d9a4-4f20-8658-4544860eaced\" (UID: \"1f4e7fae-d9a4-4f20-8658-4544860eaced\") " Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 
11:54:37.096408 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m96xx\" (UniqueName: \"kubernetes.io/projected/1f4e7fae-d9a4-4f20-8658-4544860eaced-kube-api-access-m96xx\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.096431 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.096444 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.096454 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f4e7fae-d9a4-4f20-8658-4544860eaced-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.096465 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bz9c5\" (UniqueName: \"kubernetes.io/projected/c02678af-0ebc-4689-ad5c-1300cb95439e-kube-api-access-bz9c5\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.096474 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c02678af-0ebc-4689-ad5c-1300cb95439e-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.096486 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1f4e7fae-d9a4-4f20-8658-4544860eaced-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.096496 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c02678af-0ebc-4689-ad5c-1300cb95439e-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.101773 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zxl2w"] Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.102879 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zxl2w" podUID="91923222-03a2-43ce-8382-75953af7dcfc" containerName="registry-server" containerID="cri-o://364f2ea54785a957a51ade5d06b3da8c4e1f056e042f25cca1e034457d4cd5f5" gracePeriod=2 Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.119855 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f4e7fae-d9a4-4f20-8658-4544860eaced-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1f4e7fae-d9a4-4f20-8658-4544860eaced" (UID: "1f4e7fae-d9a4-4f20-8658-4544860eaced"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.131478 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fg2ch"] Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.136852 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fg2ch"] Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.197535 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f4e7fae-d9a4-4f20-8658-4544860eaced-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.371910 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl"] Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.379046 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-kfmnl"] Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.688917 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zxl2w" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.803581 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91923222-03a2-43ce-8382-75953af7dcfc-utilities\") pod \"91923222-03a2-43ce-8382-75953af7dcfc\" (UID: \"91923222-03a2-43ce-8382-75953af7dcfc\") " Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.803611 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91923222-03a2-43ce-8382-75953af7dcfc-catalog-content\") pod \"91923222-03a2-43ce-8382-75953af7dcfc\" (UID: \"91923222-03a2-43ce-8382-75953af7dcfc\") " Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.803665 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cgpw8\" (UniqueName: \"kubernetes.io/projected/91923222-03a2-43ce-8382-75953af7dcfc-kube-api-access-cgpw8\") pod \"91923222-03a2-43ce-8382-75953af7dcfc\" (UID: \"91923222-03a2-43ce-8382-75953af7dcfc\") " Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.804714 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91923222-03a2-43ce-8382-75953af7dcfc-utilities" (OuterVolumeSpecName: "utilities") pod "91923222-03a2-43ce-8382-75953af7dcfc" (UID: "91923222-03a2-43ce-8382-75953af7dcfc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.813905 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91923222-03a2-43ce-8382-75953af7dcfc-kube-api-access-cgpw8" (OuterVolumeSpecName: "kube-api-access-cgpw8") pod "91923222-03a2-43ce-8382-75953af7dcfc" (UID: "91923222-03a2-43ce-8382-75953af7dcfc"). InnerVolumeSpecName "kube-api-access-cgpw8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.849364 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-64f784f9d7-sd44p"] Dec 02 11:54:37 crc kubenswrapper[4721]: E1202 11:54:37.849635 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91923222-03a2-43ce-8382-75953af7dcfc" containerName="registry-server" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.849652 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="91923222-03a2-43ce-8382-75953af7dcfc" containerName="registry-server" Dec 02 11:54:37 crc kubenswrapper[4721]: E1202 11:54:37.849666 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c02678af-0ebc-4689-ad5c-1300cb95439e" containerName="controller-manager" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.849674 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="c02678af-0ebc-4689-ad5c-1300cb95439e" containerName="controller-manager" Dec 02 11:54:37 crc kubenswrapper[4721]: E1202 11:54:37.849688 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f4e7fae-d9a4-4f20-8658-4544860eaced" containerName="route-controller-manager" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.849696 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f4e7fae-d9a4-4f20-8658-4544860eaced" containerName="route-controller-manager" Dec 02 11:54:37 crc kubenswrapper[4721]: E1202 11:54:37.849709 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91923222-03a2-43ce-8382-75953af7dcfc" containerName="extract-content" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.849717 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="91923222-03a2-43ce-8382-75953af7dcfc" containerName="extract-content" Dec 02 11:54:37 crc kubenswrapper[4721]: E1202 11:54:37.849729 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91923222-03a2-43ce-8382-75953af7dcfc" containerName="extract-utilities" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.849737 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="91923222-03a2-43ce-8382-75953af7dcfc" containerName="extract-utilities" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.849856 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="c02678af-0ebc-4689-ad5c-1300cb95439e" containerName="controller-manager" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.849874 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f4e7fae-d9a4-4f20-8658-4544860eaced" containerName="route-controller-manager" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.849883 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="91923222-03a2-43ce-8382-75953af7dcfc" containerName="registry-server" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.850315 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.852187 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.853372 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"] Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.854171 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.855028 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.855168 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.855814 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.855940 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.856077 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.861730 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.861949 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.862176 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.862312 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.862701 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.862706 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.865283 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.865901 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"] Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.869308 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-64f784f9d7-sd44p"] Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.904587 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cgpw8\" (UniqueName: 
\"kubernetes.io/projected/91923222-03a2-43ce-8382-75953af7dcfc-kube-api-access-cgpw8\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.904612 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91923222-03a2-43ce-8382-75953af7dcfc-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:37 crc kubenswrapper[4721]: I1202 11:54:37.922960 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91923222-03a2-43ce-8382-75953af7dcfc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "91923222-03a2-43ce-8382-75953af7dcfc" (UID: "91923222-03a2-43ce-8382-75953af7dcfc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.005804 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5ee83ff-fcfc-4318-8968-e79a68e1173f-serving-cert\") pod \"route-controller-manager-655448c569-dnkfs\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") " pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs" Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.005880 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jm2n\" (UniqueName: \"kubernetes.io/projected/a5ee83ff-fcfc-4318-8968-e79a68e1173f-kube-api-access-5jm2n\") pod \"route-controller-manager-655448c569-dnkfs\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") " pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs" Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.005907 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kxwz\" (UniqueName: \"kubernetes.io/projected/6c1054e6-3738-4f8c-b7a0-6e52d922b623-kube-api-access-4kxwz\") pod \"controller-manager-64f784f9d7-sd44p\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") " pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p" Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.005949 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5ee83ff-fcfc-4318-8968-e79a68e1173f-client-ca\") pod \"route-controller-manager-655448c569-dnkfs\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") " pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs" Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.005976 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-client-ca\") pod \"controller-manager-64f784f9d7-sd44p\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") " pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p" Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.006008 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5ee83ff-fcfc-4318-8968-e79a68e1173f-config\") pod \"route-controller-manager-655448c569-dnkfs\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") " pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs" Dec 02 11:54:38 crc 
kubenswrapper[4721]: I1202 11:54:38.006023 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c1054e6-3738-4f8c-b7a0-6e52d922b623-serving-cert\") pod \"controller-manager-64f784f9d7-sd44p\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") " pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.006064 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-config\") pod \"controller-manager-64f784f9d7-sd44p\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") " pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.006083 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-proxy-ca-bundles\") pod \"controller-manager-64f784f9d7-sd44p\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") " pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.006140 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91923222-03a2-43ce-8382-75953af7dcfc-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.041258 4721 generic.go:334] "Generic (PLEG): container finished" podID="e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" containerID="4434ec90dc8402b85d6ec58f17fc8679fed278b122aafdd167704c57d887d0e7" exitCode=0
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.041334 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-69prw" event={"ID":"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d","Type":"ContainerDied","Data":"4434ec90dc8402b85d6ec58f17fc8679fed278b122aafdd167704c57d887d0e7"}
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.043869 4721 generic.go:334] "Generic (PLEG): container finished" podID="91923222-03a2-43ce-8382-75953af7dcfc" containerID="364f2ea54785a957a51ade5d06b3da8c4e1f056e042f25cca1e034457d4cd5f5" exitCode=0
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.043912 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zxl2w"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.043940 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxl2w" event={"ID":"91923222-03a2-43ce-8382-75953af7dcfc","Type":"ContainerDied","Data":"364f2ea54785a957a51ade5d06b3da8c4e1f056e042f25cca1e034457d4cd5f5"}
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.044159 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxl2w" event={"ID":"91923222-03a2-43ce-8382-75953af7dcfc","Type":"ContainerDied","Data":"de7081aa2e06bb06058413a921a30a6ce49b878ab51c2281a7f81532bb52e43c"}
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.044181 4721 scope.go:117] "RemoveContainer" containerID="364f2ea54785a957a51ade5d06b3da8c4e1f056e042f25cca1e034457d4cd5f5"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.047843 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7l4c7" event={"ID":"4dddd92c-299e-49fa-8c1b-e68c31f2a743","Type":"ContainerStarted","Data":"5b6de36a171a5e09f2e921864e600aae567fde419dd016b150b055229c90b920"}
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.050462 4721 generic.go:334] "Generic (PLEG): container finished" podID="94f45fa8-3517-4376-b5c0-8d6cf0254027" containerID="54df233cac5ad696657cec8b16bf2d2e5377cbd8679c756cbda09974a469ec7b" exitCode=0
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.050505 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8fw86" event={"ID":"94f45fa8-3517-4376-b5c0-8d6cf0254027","Type":"ContainerDied","Data":"54df233cac5ad696657cec8b16bf2d2e5377cbd8679c756cbda09974a469ec7b"}
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.055751 4721 generic.go:334] "Generic (PLEG): container finished" podID="9610e93f-3f1c-4f61-a171-2e9739620178" containerID="e4f5e3b41638ff1217704c220cacdb3f5dd6c0c516ac13d6613d7b3ff3b69c8b" exitCode=0
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.055823 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wpd4r" event={"ID":"9610e93f-3f1c-4f61-a171-2e9739620178","Type":"ContainerDied","Data":"e4f5e3b41638ff1217704c220cacdb3f5dd6c0c516ac13d6613d7b3ff3b69c8b"}
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.058325 4721 generic.go:334] "Generic (PLEG): container finished" podID="f1e35ec2-7693-4103-97e1-3304b8ac93f4" containerID="ce97d6a116668e7e69ce778452e5b8b330d8642cf694e3969e94af7f404b34f2" exitCode=0
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.058424 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4k25r" event={"ID":"f1e35ec2-7693-4103-97e1-3304b8ac93f4","Type":"ContainerDied","Data":"ce97d6a116668e7e69ce778452e5b8b330d8642cf694e3969e94af7f404b34f2"}
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.061409 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qzrw" event={"ID":"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44","Type":"ContainerStarted","Data":"fcf291d976dcf2bfb09e3035042d0f980b0d6761303b0297455c70a6e8966794"}
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.065717 4721 scope.go:117] "RemoveContainer" containerID="9c24f7bc98451e04099ce8d62e6b4efa05fd96971c2ec368eafcf3ffe259113c"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.095146 4721 scope.go:117] "RemoveContainer" containerID="cfac5c7ae0ada4ff9d421819065384f31525542d2d3adf625609eb6a8c5c3de6"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.108828 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7l4c7" podStartSLOduration=2.960125927 podStartE2EDuration="55.108811883s" podCreationTimestamp="2025-12-02 11:53:43 +0000 UTC" firstStartedPulling="2025-12-02 11:53:45.518145033 +0000 UTC m=+155.256695382" lastFinishedPulling="2025-12-02 11:54:37.666830989 +0000 UTC m=+207.405381338" observedRunningTime="2025-12-02 11:54:38.106373918 +0000 UTC m=+207.844924277" watchObservedRunningTime="2025-12-02 11:54:38.108811883 +0000 UTC m=+207.847362232"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.111410 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5ee83ff-fcfc-4318-8968-e79a68e1173f-config\") pod \"route-controller-manager-655448c569-dnkfs\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") " pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.111568 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c1054e6-3738-4f8c-b7a0-6e52d922b623-serving-cert\") pod \"controller-manager-64f784f9d7-sd44p\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") " pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.111673 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-config\") pod \"controller-manager-64f784f9d7-sd44p\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") " pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.111710 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-proxy-ca-bundles\") pod \"controller-manager-64f784f9d7-sd44p\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") " pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.111821 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5ee83ff-fcfc-4318-8968-e79a68e1173f-serving-cert\") pod \"route-controller-manager-655448c569-dnkfs\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") " pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.111882 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jm2n\" (UniqueName: \"kubernetes.io/projected/a5ee83ff-fcfc-4318-8968-e79a68e1173f-kube-api-access-5jm2n\") pod \"route-controller-manager-655448c569-dnkfs\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") " pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.111923 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kxwz\" (UniqueName: \"kubernetes.io/projected/6c1054e6-3738-4f8c-b7a0-6e52d922b623-kube-api-access-4kxwz\") pod \"controller-manager-64f784f9d7-sd44p\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") " pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.111980 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5ee83ff-fcfc-4318-8968-e79a68e1173f-client-ca\") pod \"route-controller-manager-655448c569-dnkfs\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") " pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.112032 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-client-ca\") pod \"controller-manager-64f784f9d7-sd44p\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") " pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.116192 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-proxy-ca-bundles\") pod \"controller-manager-64f784f9d7-sd44p\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") " pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.116516 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5ee83ff-fcfc-4318-8968-e79a68e1173f-client-ca\") pod \"route-controller-manager-655448c569-dnkfs\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") " pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.117489 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-config\") pod \"controller-manager-64f784f9d7-sd44p\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") " pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.117514 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5ee83ff-fcfc-4318-8968-e79a68e1173f-config\") pod \"route-controller-manager-655448c569-dnkfs\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") " pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.121256 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5ee83ff-fcfc-4318-8968-e79a68e1173f-serving-cert\") pod \"route-controller-manager-655448c569-dnkfs\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") " pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.126975 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-client-ca\") pod \"controller-manager-64f784f9d7-sd44p\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") " pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.130596 4721 scope.go:117] "RemoveContainer" containerID="364f2ea54785a957a51ade5d06b3da8c4e1f056e042f25cca1e034457d4cd5f5"
Dec 02 11:54:38 crc kubenswrapper[4721]: E1202 11:54:38.131093 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"364f2ea54785a957a51ade5d06b3da8c4e1f056e042f25cca1e034457d4cd5f5\": container with ID starting with 364f2ea54785a957a51ade5d06b3da8c4e1f056e042f25cca1e034457d4cd5f5 not found: ID does not exist" containerID="364f2ea54785a957a51ade5d06b3da8c4e1f056e042f25cca1e034457d4cd5f5"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.131130 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"364f2ea54785a957a51ade5d06b3da8c4e1f056e042f25cca1e034457d4cd5f5"} err="failed to get container status \"364f2ea54785a957a51ade5d06b3da8c4e1f056e042f25cca1e034457d4cd5f5\": rpc error: code = NotFound desc = could not find container \"364f2ea54785a957a51ade5d06b3da8c4e1f056e042f25cca1e034457d4cd5f5\": container with ID starting with 364f2ea54785a957a51ade5d06b3da8c4e1f056e042f25cca1e034457d4cd5f5 not found: ID does not exist"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.131160 4721 scope.go:117] "RemoveContainer" containerID="9c24f7bc98451e04099ce8d62e6b4efa05fd96971c2ec368eafcf3ffe259113c"
Dec 02 11:54:38 crc kubenswrapper[4721]: E1202 11:54:38.131600 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c24f7bc98451e04099ce8d62e6b4efa05fd96971c2ec368eafcf3ffe259113c\": container with ID starting with 9c24f7bc98451e04099ce8d62e6b4efa05fd96971c2ec368eafcf3ffe259113c not found: ID does not exist" containerID="9c24f7bc98451e04099ce8d62e6b4efa05fd96971c2ec368eafcf3ffe259113c"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.131623 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c24f7bc98451e04099ce8d62e6b4efa05fd96971c2ec368eafcf3ffe259113c"} err="failed to get container status \"9c24f7bc98451e04099ce8d62e6b4efa05fd96971c2ec368eafcf3ffe259113c\": rpc error: code = NotFound desc = could not find container \"9c24f7bc98451e04099ce8d62e6b4efa05fd96971c2ec368eafcf3ffe259113c\": container with ID starting with 9c24f7bc98451e04099ce8d62e6b4efa05fd96971c2ec368eafcf3ffe259113c not found: ID does not exist"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.131636 4721 scope.go:117] "RemoveContainer" containerID="cfac5c7ae0ada4ff9d421819065384f31525542d2d3adf625609eb6a8c5c3de6"
Dec 02 11:54:38 crc kubenswrapper[4721]: E1202 11:54:38.131859 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfac5c7ae0ada4ff9d421819065384f31525542d2d3adf625609eb6a8c5c3de6\": container with ID starting with cfac5c7ae0ada4ff9d421819065384f31525542d2d3adf625609eb6a8c5c3de6 not found: ID does not exist" containerID="cfac5c7ae0ada4ff9d421819065384f31525542d2d3adf625609eb6a8c5c3de6"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.131878 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfac5c7ae0ada4ff9d421819065384f31525542d2d3adf625609eb6a8c5c3de6"} err="failed to get container status \"cfac5c7ae0ada4ff9d421819065384f31525542d2d3adf625609eb6a8c5c3de6\": rpc error: code = NotFound desc = could not find container \"cfac5c7ae0ada4ff9d421819065384f31525542d2d3adf625609eb6a8c5c3de6\": container with ID starting with cfac5c7ae0ada4ff9d421819065384f31525542d2d3adf625609eb6a8c5c3de6 not found: ID does not exist"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.132797 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c1054e6-3738-4f8c-b7a0-6e52d922b623-serving-cert\") pod \"controller-manager-64f784f9d7-sd44p\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") " pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.137941 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kxwz\" (UniqueName: \"kubernetes.io/projected/6c1054e6-3738-4f8c-b7a0-6e52d922b623-kube-api-access-4kxwz\") pod \"controller-manager-64f784f9d7-sd44p\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") " pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.138619 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jm2n\" (UniqueName: \"kubernetes.io/projected/a5ee83ff-fcfc-4318-8968-e79a68e1173f-kube-api-access-5jm2n\") pod \"route-controller-manager-655448c569-dnkfs\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") " pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.176975 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.177414 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4qzrw" podStartSLOduration=2.198856899 podStartE2EDuration="55.177396695s" podCreationTimestamp="2025-12-02 11:53:43 +0000 UTC" firstStartedPulling="2025-12-02 11:53:44.498930695 +0000 UTC m=+154.237481044" lastFinishedPulling="2025-12-02 11:54:37.477470491 +0000 UTC m=+207.216020840" observedRunningTime="2025-12-02 11:54:38.158335361 +0000 UTC m=+207.896885710" watchObservedRunningTime="2025-12-02 11:54:38.177396695 +0000 UTC m=+207.915947054"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.189370 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zxl2w"]
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.190812 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.191144 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zxl2w"]
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.407438 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"]
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.454280 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-64f784f9d7-sd44p"]
Dec 02 11:54:38 crc kubenswrapper[4721]: W1202 11:54:38.468821 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c1054e6_3738_4f8c_b7a0_6e52d922b623.slice/crio-efc585a7e6f6f61e57115bf2fa993d9e03a0cb89b5a48e2d19dedea306d39b09 WatchSource:0}: Error finding container efc585a7e6f6f61e57115bf2fa993d9e03a0cb89b5a48e2d19dedea306d39b09: Status 404 returned error can't find the container with id efc585a7e6f6f61e57115bf2fa993d9e03a0cb89b5a48e2d19dedea306d39b09
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.859738 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f4e7fae-d9a4-4f20-8658-4544860eaced" path="/var/lib/kubelet/pods/1f4e7fae-d9a4-4f20-8658-4544860eaced/volumes"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.860633 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91923222-03a2-43ce-8382-75953af7dcfc" path="/var/lib/kubelet/pods/91923222-03a2-43ce-8382-75953af7dcfc/volumes"
Dec 02 11:54:38 crc kubenswrapper[4721]: I1202 11:54:38.861282 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c02678af-0ebc-4689-ad5c-1300cb95439e" path="/var/lib/kubelet/pods/c02678af-0ebc-4689-ad5c-1300cb95439e/volumes"
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.069753 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8fw86" event={"ID":"94f45fa8-3517-4376-b5c0-8d6cf0254027","Type":"ContainerStarted","Data":"477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e"}
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.071867 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-69prw" event={"ID":"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d","Type":"ContainerStarted","Data":"a711879d034115b61845a1472d890b4c3fae8867277dabbc065e854dd9bec906"}
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.074034 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wpd4r" event={"ID":"9610e93f-3f1c-4f61-a171-2e9739620178","Type":"ContainerStarted","Data":"5af4e5eb4f428d90bc547de39939580839d087ab396a92a5c9d4860ab54f8a37"}
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.076960 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs" event={"ID":"a5ee83ff-fcfc-4318-8968-e79a68e1173f","Type":"ContainerStarted","Data":"da2983f939d1e65f54872c99df9eafc4de756d77065e0c90d54af211ba8bb3a7"}
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.077020 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs" event={"ID":"a5ee83ff-fcfc-4318-8968-e79a68e1173f","Type":"ContainerStarted","Data":"db243b3f7f3e3cef8322d0fd435d8e1207c22de5423fd5a2a55581e556ce7690"}
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.077047 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.079417 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4k25r" event={"ID":"f1e35ec2-7693-4103-97e1-3304b8ac93f4","Type":"ContainerStarted","Data":"f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833"}
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.081152 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p" event={"ID":"6c1054e6-3738-4f8c-b7a0-6e52d922b623","Type":"ContainerStarted","Data":"dce6c4759af41e11de4257d1a6cf772834b0b952bed9a56a1b5fdb183c03dc55"}
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.081178 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p" event={"ID":"6c1054e6-3738-4f8c-b7a0-6e52d922b623","Type":"ContainerStarted","Data":"efc585a7e6f6f61e57115bf2fa993d9e03a0cb89b5a48e2d19dedea306d39b09"}
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.081411 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.092160 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.123173 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8fw86" podStartSLOduration=1.779392218 podStartE2EDuration="58.12315224s" podCreationTimestamp="2025-12-02 11:53:41 +0000 UTC" firstStartedPulling="2025-12-02 11:53:42.375003986 +0000 UTC m=+152.113554335" lastFinishedPulling="2025-12-02 11:54:38.718764008 +0000 UTC m=+208.457314357" observedRunningTime="2025-12-02 11:54:39.116707532 +0000 UTC m=+208.855257881" watchObservedRunningTime="2025-12-02 11:54:39.12315224 +0000 UTC m=+208.861702599"
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.137386 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.148146 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs" podStartSLOduration=3.148125776 podStartE2EDuration="3.148125776s" podCreationTimestamp="2025-12-02 11:54:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:54:39.145434053 +0000 UTC m=+208.883984422" watchObservedRunningTime="2025-12-02 11:54:39.148125776 +0000 UTC m=+208.886676125"
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.181074 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wpd4r" podStartSLOduration=3.021915728 podStartE2EDuration="58.181059735s" podCreationTimestamp="2025-12-02 11:53:41 +0000 UTC" firstStartedPulling="2025-12-02 11:53:43.431135541 +0000 UTC m=+153.169685890" lastFinishedPulling="2025-12-02 11:54:38.590279548 +0000 UTC m=+208.328829897" observedRunningTime="2025-12-02 11:54:39.179117256 +0000 UTC m=+208.917667605" watchObservedRunningTime="2025-12-02 11:54:39.181059735 +0000 UTC m=+208.919610074"
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.207106 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4k25r" podStartSLOduration=1.844326733 podStartE2EDuration="58.207087834s" podCreationTimestamp="2025-12-02 11:53:41 +0000 UTC" firstStartedPulling="2025-12-02 11:53:42.37829179 +0000 UTC m=+152.116842139" lastFinishedPulling="2025-12-02 11:54:38.741052891 +0000 UTC m=+208.479603240" observedRunningTime="2025-12-02 11:54:39.205446423 +0000 UTC m=+208.943996792" watchObservedRunningTime="2025-12-02 11:54:39.207087834 +0000 UTC m=+208.945638183"
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.249216 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-69prw" podStartSLOduration=3.099981249 podStartE2EDuration="58.249201884s" podCreationTimestamp="2025-12-02 11:53:41 +0000 UTC" firstStartedPulling="2025-12-02 11:53:43.408086302 +0000 UTC m=+153.146636651" lastFinishedPulling="2025-12-02 11:54:38.557306947 +0000 UTC m=+208.295857286" observedRunningTime="2025-12-02 11:54:39.231843443 +0000 UTC m=+208.970393792" watchObservedRunningTime="2025-12-02 11:54:39.249201884 +0000 UTC m=+208.987752233"
Dec 02 11:54:39 crc kubenswrapper[4721]: I1202 11:54:39.249765 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p" podStartSLOduration=3.249761402 podStartE2EDuration="3.249761402s" podCreationTimestamp="2025-12-02 11:54:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:54:39.24646128 +0000 UTC m=+208.985011649" watchObservedRunningTime="2025-12-02 11:54:39.249761402 +0000 UTC m=+208.988311751"
Dec 02 11:54:41 crc kubenswrapper[4721]: I1202 11:54:41.670655 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8fw86"
Dec 02 11:54:41 crc kubenswrapper[4721]: I1202 11:54:41.671153 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8fw86"
Dec 02 11:54:41 crc kubenswrapper[4721]: I1202 11:54:41.711988 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8fw86"
Dec 02 11:54:41 crc kubenswrapper[4721]: I1202 11:54:41.874285 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4k25r"
Dec 02 11:54:41 crc kubenswrapper[4721]: I1202 11:54:41.874336 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4k25r"
Dec 02 11:54:41 crc kubenswrapper[4721]: I1202 11:54:41.911004 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4k25r"
Dec 02 11:54:42 crc kubenswrapper[4721]: I1202 11:54:42.098149 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-69prw"
Dec 02 11:54:42 crc kubenswrapper[4721]: I1202 11:54:42.098190 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-69prw"
Dec 02 11:54:42 crc kubenswrapper[4721]: I1202 11:54:42.135277 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-69prw"
Dec 02 11:54:42 crc kubenswrapper[4721]: I1202 11:54:42.270682 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wpd4r"
Dec 02 11:54:42 crc kubenswrapper[4721]: I1202 11:54:42.270747 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wpd4r"
Dec 02 11:54:42 crc kubenswrapper[4721]: I1202 11:54:42.309947 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wpd4r"
Dec 02 11:54:43 crc kubenswrapper[4721]: I1202 11:54:43.136880 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wpd4r"
Dec 02 11:54:43 crc kubenswrapper[4721]: I1202 11:54:43.137005 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-69prw"
Dec 02 11:54:43 crc kubenswrapper[4721]: I1202 11:54:43.675095 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4qzrw"
Dec 02 11:54:43 crc kubenswrapper[4721]: I1202 11:54:43.675156 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4qzrw"
Dec 02 11:54:43 crc kubenswrapper[4721]: I1202 11:54:43.727694 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4qzrw"
Dec 02 11:54:44 crc kubenswrapper[4721]: I1202 11:54:44.069279 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7l4c7"
Dec 02 11:54:44 crc kubenswrapper[4721]: I1202 11:54:44.069331 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7l4c7"
Dec 02 11:54:44 crc kubenswrapper[4721]: I1202 11:54:44.112218 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7l4c7"
Dec 02 11:54:44 crc kubenswrapper[4721]: I1202 11:54:44.162142 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4qzrw"
Dec 02 11:54:44 crc kubenswrapper[4721]: I1202 11:54:44.176696 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7l4c7"
Dec 02 11:54:45 crc kubenswrapper[4721]: I1202 11:54:45.289272 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-69prw"]
Dec 02 11:54:45 crc kubenswrapper[4721]: I1202 11:54:45.290968 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-69prw" podUID="e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" containerName="registry-server" containerID="cri-o://a711879d034115b61845a1472d890b4c3fae8867277dabbc065e854dd9bec906" gracePeriod=2
Dec 02 11:54:45 crc kubenswrapper[4721]: I1202 11:54:45.486676 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wpd4r"]
Dec 02 11:54:45 crc kubenswrapper[4721]: I1202 11:54:45.486895 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wpd4r" podUID="9610e93f-3f1c-4f61-a171-2e9739620178" containerName="registry-server" containerID="cri-o://5af4e5eb4f428d90bc547de39939580839d087ab396a92a5c9d4860ab54f8a37" gracePeriod=2
Dec 02 11:54:47 crc kubenswrapper[4721]: I1202 11:54:47.690295 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7l4c7"]
Dec 02 11:54:47 crc kubenswrapper[4721]: I1202 11:54:47.690566 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7l4c7" podUID="4dddd92c-299e-49fa-8c1b-e68c31f2a743" containerName="registry-server" containerID="cri-o://5b6de36a171a5e09f2e921864e600aae567fde419dd016b150b055229c90b920" gracePeriod=2
Dec 02 11:54:47 crc kubenswrapper[4721]: I1202 11:54:47.715258 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wpd4r"
Dec 02 11:54:47 crc kubenswrapper[4721]: I1202 11:54:47.832666 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hrbj\" (UniqueName: \"kubernetes.io/projected/9610e93f-3f1c-4f61-a171-2e9739620178-kube-api-access-5hrbj\") pod \"9610e93f-3f1c-4f61-a171-2e9739620178\" (UID: \"9610e93f-3f1c-4f61-a171-2e9739620178\") "
Dec 02 11:54:47 crc kubenswrapper[4721]: I1202 11:54:47.832714 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9610e93f-3f1c-4f61-a171-2e9739620178-catalog-content\") pod \"9610e93f-3f1c-4f61-a171-2e9739620178\" (UID: \"9610e93f-3f1c-4f61-a171-2e9739620178\") "
Dec 02 11:54:47 crc kubenswrapper[4721]: I1202 11:54:47.832839 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9610e93f-3f1c-4f61-a171-2e9739620178-utilities\") pod \"9610e93f-3f1c-4f61-a171-2e9739620178\" (UID: \"9610e93f-3f1c-4f61-a171-2e9739620178\") "
Dec 02 11:54:47 crc kubenswrapper[4721]: I1202 11:54:47.833853 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9610e93f-3f1c-4f61-a171-2e9739620178-utilities" (OuterVolumeSpecName: "utilities") pod "9610e93f-3f1c-4f61-a171-2e9739620178" (UID: "9610e93f-3f1c-4f61-a171-2e9739620178"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 11:54:47 crc kubenswrapper[4721]: I1202 11:54:47.841515 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9610e93f-3f1c-4f61-a171-2e9739620178-kube-api-access-5hrbj" (OuterVolumeSpecName: "kube-api-access-5hrbj") pod "9610e93f-3f1c-4f61-a171-2e9739620178" (UID: "9610e93f-3f1c-4f61-a171-2e9739620178"). InnerVolumeSpecName "kube-api-access-5hrbj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 11:54:47 crc kubenswrapper[4721]: I1202 11:54:47.934047 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9610e93f-3f1c-4f61-a171-2e9739620178-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 11:54:47 crc kubenswrapper[4721]: I1202 11:54:47.934085 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hrbj\" (UniqueName: \"kubernetes.io/projected/9610e93f-3f1c-4f61-a171-2e9739620178-kube-api-access-5hrbj\") on node \"crc\" DevicePath \"\""
Dec 02 11:54:47 crc kubenswrapper[4721]: I1202 11:54:47.952782 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9610e93f-3f1c-4f61-a171-2e9739620178-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9610e93f-3f1c-4f61-a171-2e9739620178" (UID: "9610e93f-3f1c-4f61-a171-2e9739620178"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.035711 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9610e93f-3f1c-4f61-a171-2e9739620178-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.132476 4721 generic.go:334] "Generic (PLEG): container finished" podID="e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" containerID="a711879d034115b61845a1472d890b4c3fae8867277dabbc065e854dd9bec906" exitCode=0
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.132549 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-69prw" event={"ID":"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d","Type":"ContainerDied","Data":"a711879d034115b61845a1472d890b4c3fae8867277dabbc065e854dd9bec906"}
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.135564 4721 generic.go:334] "Generic (PLEG): container finished" podID="9610e93f-3f1c-4f61-a171-2e9739620178" containerID="5af4e5eb4f428d90bc547de39939580839d087ab396a92a5c9d4860ab54f8a37" exitCode=0
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.135634 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wpd4r"
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.135619 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wpd4r" event={"ID":"9610e93f-3f1c-4f61-a171-2e9739620178","Type":"ContainerDied","Data":"5af4e5eb4f428d90bc547de39939580839d087ab396a92a5c9d4860ab54f8a37"}
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.135764 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wpd4r" event={"ID":"9610e93f-3f1c-4f61-a171-2e9739620178","Type":"ContainerDied","Data":"e61b7a3190369cf575115bf01f498a1eef6b84f39cab33098ae75483d70dee56"}
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.135790 4721 scope.go:117] "RemoveContainer" containerID="5af4e5eb4f428d90bc547de39939580839d087ab396a92a5c9d4860ab54f8a37"
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.145889 4721 generic.go:334] "Generic (PLEG): container finished" podID="4dddd92c-299e-49fa-8c1b-e68c31f2a743" containerID="5b6de36a171a5e09f2e921864e600aae567fde419dd016b150b055229c90b920" exitCode=0
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.145961 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7l4c7" event={"ID":"4dddd92c-299e-49fa-8c1b-e68c31f2a743","Type":"ContainerDied","Data":"5b6de36a171a5e09f2e921864e600aae567fde419dd016b150b055229c90b920"}
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.161071 4721 scope.go:117] "RemoveContainer" containerID="e4f5e3b41638ff1217704c220cacdb3f5dd6c0c516ac13d6613d7b3ff3b69c8b"
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.167212 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wpd4r"]
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.171551 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wpd4r"]
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.181714 4721 scope.go:117] "RemoveContainer" containerID="103e4b9f210b16b87fcb300e4e6e18e880bb41f0b454db426330f29a80b2a148"
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.197756 4721 scope.go:117] "RemoveContainer" containerID="5af4e5eb4f428d90bc547de39939580839d087ab396a92a5c9d4860ab54f8a37"
Dec 02 11:54:48 crc kubenswrapper[4721]: E1202 11:54:48.198242 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5af4e5eb4f428d90bc547de39939580839d087ab396a92a5c9d4860ab54f8a37\": container with ID starting with 5af4e5eb4f428d90bc547de39939580839d087ab396a92a5c9d4860ab54f8a37 not found: ID does not exist" containerID="5af4e5eb4f428d90bc547de39939580839d087ab396a92a5c9d4860ab54f8a37"
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.198271 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5af4e5eb4f428d90bc547de39939580839d087ab396a92a5c9d4860ab54f8a37"} err="failed to get container status \"5af4e5eb4f428d90bc547de39939580839d087ab396a92a5c9d4860ab54f8a37\": rpc error: code = NotFound desc = could not find container \"5af4e5eb4f428d90bc547de39939580839d087ab396a92a5c9d4860ab54f8a37\": container with ID starting with 5af4e5eb4f428d90bc547de39939580839d087ab396a92a5c9d4860ab54f8a37 not found: ID does not exist"
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.198293 4721 scope.go:117] "RemoveContainer" containerID="e4f5e3b41638ff1217704c220cacdb3f5dd6c0c516ac13d6613d7b3ff3b69c8b"
Dec 02 11:54:48 crc kubenswrapper[4721]: E1202 11:54:48.198967 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4f5e3b41638ff1217704c220cacdb3f5dd6c0c516ac13d6613d7b3ff3b69c8b\": container with ID starting with e4f5e3b41638ff1217704c220cacdb3f5dd6c0c516ac13d6613d7b3ff3b69c8b not found: ID does not exist" containerID="e4f5e3b41638ff1217704c220cacdb3f5dd6c0c516ac13d6613d7b3ff3b69c8b"
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.199019 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4f5e3b41638ff1217704c220cacdb3f5dd6c0c516ac13d6613d7b3ff3b69c8b"} err="failed to get container status \"e4f5e3b41638ff1217704c220cacdb3f5dd6c0c516ac13d6613d7b3ff3b69c8b\": rpc error: code = NotFound desc = could not find container \"e4f5e3b41638ff1217704c220cacdb3f5dd6c0c516ac13d6613d7b3ff3b69c8b\": container with ID starting with e4f5e3b41638ff1217704c220cacdb3f5dd6c0c516ac13d6613d7b3ff3b69c8b not found: ID does not exist"
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.199059 4721 scope.go:117] "RemoveContainer" containerID="103e4b9f210b16b87fcb300e4e6e18e880bb41f0b454db426330f29a80b2a148"
Dec 02 11:54:48 crc kubenswrapper[4721]: E1202 11:54:48.199541 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"103e4b9f210b16b87fcb300e4e6e18e880bb41f0b454db426330f29a80b2a148\": container with ID starting with 103e4b9f210b16b87fcb300e4e6e18e880bb41f0b454db426330f29a80b2a148 not found: ID does not exist" containerID="103e4b9f210b16b87fcb300e4e6e18e880bb41f0b454db426330f29a80b2a148"
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.199606 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"103e4b9f210b16b87fcb300e4e6e18e880bb41f0b454db426330f29a80b2a148"} err="failed to get container status \"103e4b9f210b16b87fcb300e4e6e18e880bb41f0b454db426330f29a80b2a148\": rpc error: code = NotFound desc = could not find container \"103e4b9f210b16b87fcb300e4e6e18e880bb41f0b454db426330f29a80b2a148\": container with ID starting with 103e4b9f210b16b87fcb300e4e6e18e880bb41f0b454db426330f29a80b2a148 not found: ID does not exist"
Dec 02 11:54:48 crc kubenswrapper[4721]: I1202 11:54:48.858466 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9610e93f-3f1c-4f61-a171-2e9739620178" path="/var/lib/kubelet/pods/9610e93f-3f1c-4f61-a171-2e9739620178/volumes"
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.019126 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-69prw"
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.109100 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7l4c7"
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.154756 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-catalog-content\") pod \"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d\" (UID: \"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d\") "
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.154842 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nc77\" (UniqueName: \"kubernetes.io/projected/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-kube-api-access-6nc77\") pod \"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d\" (UID: \"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d\") "
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.155280 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-utilities\") pod \"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d\" (UID: \"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d\") "
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.158581 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-69prw" event={"ID":"e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d","Type":"ContainerDied","Data":"f27d05f5596d53b546419e7399b6ab0e35f07c539623c16bc7ad04430189a8aa"}
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.158700 4721 scope.go:117] "RemoveContainer" containerID="a711879d034115b61845a1472d890b4c3fae8867277dabbc065e854dd9bec906"
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.158853 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-69prw"
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.158930 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-utilities" (OuterVolumeSpecName: "utilities") pod "e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" (UID: "e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.159655 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.164141 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-kube-api-access-6nc77" (OuterVolumeSpecName: "kube-api-access-6nc77") pod "e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" (UID: "e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d"). InnerVolumeSpecName "kube-api-access-6nc77". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.164566 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7l4c7" event={"ID":"4dddd92c-299e-49fa-8c1b-e68c31f2a743","Type":"ContainerDied","Data":"a5b0106e884aa9c1e3dc49ed453e0077441941a3bead9fcaaa050b0c378fca07"}
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.164662 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7l4c7"
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.184101 4721 scope.go:117] "RemoveContainer" containerID="4434ec90dc8402b85d6ec58f17fc8679fed278b122aafdd167704c57d887d0e7"
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.202136 4721 scope.go:117] "RemoveContainer" containerID="5f103363afda81262bed0ead42b675e283ce72dcc3ee538618aa32abbb889584"
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.210278 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" (UID: "e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.217756 4721 scope.go:117] "RemoveContainer" containerID="5b6de36a171a5e09f2e921864e600aae567fde419dd016b150b055229c90b920"
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.235696 4721 scope.go:117] "RemoveContainer" containerID="60324830e7b1e411a913d1eae277bac39ff38fc28b5cddadc6bbb2cf94bcf495"
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.254336 4721 scope.go:117] "RemoveContainer" containerID="e4b93877861582bc1afbe0e0b3f58ceaebc15fe1dd953386675d1d5ad1727dfb"
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.261140 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dddd92c-299e-49fa-8c1b-e68c31f2a743-catalog-content\") pod \"4dddd92c-299e-49fa-8c1b-e68c31f2a743\" (UID: \"4dddd92c-299e-49fa-8c1b-e68c31f2a743\") "
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.261247 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dddd92c-299e-49fa-8c1b-e68c31f2a743-utilities\") pod \"4dddd92c-299e-49fa-8c1b-e68c31f2a743\" (UID: \"4dddd92c-299e-49fa-8c1b-e68c31f2a743\") "
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.261317 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d84nk\" (UniqueName: \"kubernetes.io/projected/4dddd92c-299e-49fa-8c1b-e68c31f2a743-kube-api-access-d84nk\") pod \"4dddd92c-299e-49fa-8c1b-e68c31f2a743\" (UID: \"4dddd92c-299e-49fa-8c1b-e68c31f2a743\") "
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.261741 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.261765 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nc77\" (UniqueName: \"kubernetes.io/projected/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d-kube-api-access-6nc77\") on node \"crc\" DevicePath \"\""
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.262164 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dddd92c-299e-49fa-8c1b-e68c31f2a743-utilities" (OuterVolumeSpecName: "utilities") pod "4dddd92c-299e-49fa-8c1b-e68c31f2a743" (UID: "4dddd92c-299e-49fa-8c1b-e68c31f2a743"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.267844 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dddd92c-299e-49fa-8c1b-e68c31f2a743-kube-api-access-d84nk" (OuterVolumeSpecName: "kube-api-access-d84nk") pod "4dddd92c-299e-49fa-8c1b-e68c31f2a743" (UID: "4dddd92c-299e-49fa-8c1b-e68c31f2a743"). InnerVolumeSpecName "kube-api-access-d84nk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.282848 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dddd92c-299e-49fa-8c1b-e68c31f2a743-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4dddd92c-299e-49fa-8c1b-e68c31f2a743" (UID: "4dddd92c-299e-49fa-8c1b-e68c31f2a743"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.364251 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dddd92c-299e-49fa-8c1b-e68c31f2a743-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.364338 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d84nk\" (UniqueName: \"kubernetes.io/projected/4dddd92c-299e-49fa-8c1b-e68c31f2a743-kube-api-access-d84nk\") on node \"crc\" DevicePath \"\""
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.364403 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dddd92c-299e-49fa-8c1b-e68c31f2a743-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.493824 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-69prw"]
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.501419 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-69prw"]
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.510030 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7l4c7"]
Dec 02 11:54:49 crc kubenswrapper[4721]: I1202 11:54:49.513228 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7l4c7"]
Dec 02 11:54:50 crc kubenswrapper[4721]: I1202 11:54:50.850249 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dddd92c-299e-49fa-8c1b-e68c31f2a743" path="/var/lib/kubelet/pods/4dddd92c-299e-49fa-8c1b-e68c31f2a743/volumes"
Dec 02 11:54:50 crc kubenswrapper[4721]: I1202 11:54:50.851375 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" path="/var/lib/kubelet/pods/e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d/volumes"
Dec 02 11:54:51 crc kubenswrapper[4721]: I1202 11:54:51.706029 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8fw86"
Dec 02 11:54:51 crc kubenswrapper[4721]: I1202 11:54:51.924146 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4k25r"
Dec 02 11:54:55 crc kubenswrapper[4721]: I1202 11:54:55.703536 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 11:54:55 crc kubenswrapper[4721]: I1202 11:54:55.703904 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 11:54:55 crc kubenswrapper[4721]: I1202 11:54:55.703950 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s"
Dec 02 11:54:55 crc kubenswrapper[4721]: I1202 11:54:55.704466 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff"} pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 11:54:55 crc kubenswrapper[4721]: I1202 11:54:55.704536 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" containerID="cri-o://408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff" gracePeriod=600
Dec 02 11:54:56 crc kubenswrapper[4721]: I1202 11:54:56.206128 4721 generic.go:334] "Generic (PLEG): container finished" podID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerID="408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff" exitCode=0
Dec 02 11:54:56 crc kubenswrapper[4721]: I1202 11:54:56.206220 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerDied","Data":"408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff"}
Dec 02 11:54:56 crc kubenswrapper[4721]: I1202 11:54:56.494486 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-64f784f9d7-sd44p"]
Dec 02 11:54:56 crc kubenswrapper[4721]: I1202 11:54:56.494783 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p" podUID="6c1054e6-3738-4f8c-b7a0-6e52d922b623" containerName="controller-manager" containerID="cri-o://dce6c4759af41e11de4257d1a6cf772834b0b952bed9a56a1b5fdb183c03dc55" gracePeriod=30
Dec 02 11:54:56 crc kubenswrapper[4721]: I1202 11:54:56.496811 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"]
Dec 02 11:54:56 crc kubenswrapper[4721]: I1202 11:54:56.497027 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs" podUID="a5ee83ff-fcfc-4318-8968-e79a68e1173f" containerName="route-controller-manager" containerID="cri-o://da2983f939d1e65f54872c99df9eafc4de756d77065e0c90d54af211ba8bb3a7" gracePeriod=30
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.000722 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.120255 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5ee83ff-fcfc-4318-8968-e79a68e1173f-client-ca\") pod \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") "
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.120324 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jm2n\" (UniqueName: \"kubernetes.io/projected/a5ee83ff-fcfc-4318-8968-e79a68e1173f-kube-api-access-5jm2n\") pod \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") "
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.120382 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5ee83ff-fcfc-4318-8968-e79a68e1173f-config\") pod \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") "
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.120401 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5ee83ff-fcfc-4318-8968-e79a68e1173f-serving-cert\") pod \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\" (UID: \"a5ee83ff-fcfc-4318-8968-e79a68e1173f\") "
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.121506 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5ee83ff-fcfc-4318-8968-e79a68e1173f-client-ca" (OuterVolumeSpecName: "client-ca") pod "a5ee83ff-fcfc-4318-8968-e79a68e1173f" (UID: "a5ee83ff-fcfc-4318-8968-e79a68e1173f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.121785 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5ee83ff-fcfc-4318-8968-e79a68e1173f-config" (OuterVolumeSpecName: "config") pod "a5ee83ff-fcfc-4318-8968-e79a68e1173f" (UID: "a5ee83ff-fcfc-4318-8968-e79a68e1173f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.125267 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5ee83ff-fcfc-4318-8968-e79a68e1173f-kube-api-access-5jm2n" (OuterVolumeSpecName: "kube-api-access-5jm2n") pod "a5ee83ff-fcfc-4318-8968-e79a68e1173f" (UID: "a5ee83ff-fcfc-4318-8968-e79a68e1173f"). InnerVolumeSpecName "kube-api-access-5jm2n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.125399 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5ee83ff-fcfc-4318-8968-e79a68e1173f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a5ee83ff-fcfc-4318-8968-e79a68e1173f" (UID: "a5ee83ff-fcfc-4318-8968-e79a68e1173f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.159717 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.212615 4721 generic.go:334] "Generic (PLEG): container finished" podID="a5ee83ff-fcfc-4318-8968-e79a68e1173f" containerID="da2983f939d1e65f54872c99df9eafc4de756d77065e0c90d54af211ba8bb3a7" exitCode=0
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.212654 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs" event={"ID":"a5ee83ff-fcfc-4318-8968-e79a68e1173f","Type":"ContainerDied","Data":"da2983f939d1e65f54872c99df9eafc4de756d77065e0c90d54af211ba8bb3a7"}
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.212688 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs" event={"ID":"a5ee83ff-fcfc-4318-8968-e79a68e1173f","Type":"ContainerDied","Data":"db243b3f7f3e3cef8322d0fd435d8e1207c22de5423fd5a2a55581e556ce7690"}
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.212691 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.212704 4721 scope.go:117] "RemoveContainer" containerID="da2983f939d1e65f54872c99df9eafc4de756d77065e0c90d54af211ba8bb3a7"
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.214381 4721 generic.go:334] "Generic (PLEG): container finished" podID="6c1054e6-3738-4f8c-b7a0-6e52d922b623" containerID="dce6c4759af41e11de4257d1a6cf772834b0b952bed9a56a1b5fdb183c03dc55" exitCode=0
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.214442 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p"
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.214449 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p" event={"ID":"6c1054e6-3738-4f8c-b7a0-6e52d922b623","Type":"ContainerDied","Data":"dce6c4759af41e11de4257d1a6cf772834b0b952bed9a56a1b5fdb183c03dc55"}
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.214475 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-64f784f9d7-sd44p" event={"ID":"6c1054e6-3738-4f8c-b7a0-6e52d922b623","Type":"ContainerDied","Data":"efc585a7e6f6f61e57115bf2fa993d9e03a0cb89b5a48e2d19dedea306d39b09"}
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.217862 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerStarted","Data":"af0d455046abe7542ae5be55dc88c0a9b4a1f4a60770dc97714ed00134bf7dd9"}
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.221775 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c1054e6-3738-4f8c-b7a0-6e52d922b623-serving-cert\") pod \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") "
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.221828 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-config\") pod \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") "
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.221910 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kxwz\" (UniqueName: \"kubernetes.io/projected/6c1054e6-3738-4f8c-b7a0-6e52d922b623-kube-api-access-4kxwz\") pod \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") "
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.221927 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-proxy-ca-bundles\") pod \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") "
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.221983 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-client-ca\") pod \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\" (UID: \"6c1054e6-3738-4f8c-b7a0-6e52d922b623\") "
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.222173 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5ee83ff-fcfc-4318-8968-e79a68e1173f-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.222191 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5ee83ff-fcfc-4318-8968-e79a68e1173f-client-ca\") on node \"crc\" DevicePath \"\""
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.222203 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jm2n\" (UniqueName: \"kubernetes.io/projected/a5ee83ff-fcfc-4318-8968-e79a68e1173f-kube-api-access-5jm2n\") on node \"crc\" DevicePath \"\""
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.222212 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5ee83ff-fcfc-4318-8968-e79a68e1173f-config\") on node \"crc\" DevicePath \"\""
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.222754 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-client-ca" (OuterVolumeSpecName: "client-ca") pod "6c1054e6-3738-4f8c-b7a0-6e52d922b623" (UID: "6c1054e6-3738-4f8c-b7a0-6e52d922b623"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.223224 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "6c1054e6-3738-4f8c-b7a0-6e52d922b623" (UID: "6c1054e6-3738-4f8c-b7a0-6e52d922b623"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.223795 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-config" (OuterVolumeSpecName: "config") pod "6c1054e6-3738-4f8c-b7a0-6e52d922b623" (UID: "6c1054e6-3738-4f8c-b7a0-6e52d922b623"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.224620 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c1054e6-3738-4f8c-b7a0-6e52d922b623-kube-api-access-4kxwz" (OuterVolumeSpecName: "kube-api-access-4kxwz") pod "6c1054e6-3738-4f8c-b7a0-6e52d922b623" (UID: "6c1054e6-3738-4f8c-b7a0-6e52d922b623"). InnerVolumeSpecName "kube-api-access-4kxwz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.224820 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c1054e6-3738-4f8c-b7a0-6e52d922b623-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6c1054e6-3738-4f8c-b7a0-6e52d922b623" (UID: "6c1054e6-3738-4f8c-b7a0-6e52d922b623"). InnerVolumeSpecName "serving-cert".
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.231403 4721 scope.go:117] "RemoveContainer" containerID="da2983f939d1e65f54872c99df9eafc4de756d77065e0c90d54af211ba8bb3a7" Dec 02 11:54:57 crc kubenswrapper[4721]: E1202 11:54:57.231858 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da2983f939d1e65f54872c99df9eafc4de756d77065e0c90d54af211ba8bb3a7\": container with ID starting with da2983f939d1e65f54872c99df9eafc4de756d77065e0c90d54af211ba8bb3a7 not found: ID does not exist" containerID="da2983f939d1e65f54872c99df9eafc4de756d77065e0c90d54af211ba8bb3a7" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.231895 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da2983f939d1e65f54872c99df9eafc4de756d77065e0c90d54af211ba8bb3a7"} err="failed to get container status \"da2983f939d1e65f54872c99df9eafc4de756d77065e0c90d54af211ba8bb3a7\": rpc error: code = NotFound desc = could not find container \"da2983f939d1e65f54872c99df9eafc4de756d77065e0c90d54af211ba8bb3a7\": container with ID starting with da2983f939d1e65f54872c99df9eafc4de756d77065e0c90d54af211ba8bb3a7 not found: ID does not exist" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.231920 4721 scope.go:117] "RemoveContainer" containerID="dce6c4759af41e11de4257d1a6cf772834b0b952bed9a56a1b5fdb183c03dc55" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.241157 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"] Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.243511 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-655448c569-dnkfs"] Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.247075 4721 scope.go:117] "RemoveContainer" containerID="dce6c4759af41e11de4257d1a6cf772834b0b952bed9a56a1b5fdb183c03dc55" Dec 02 11:54:57 crc kubenswrapper[4721]: E1202 11:54:57.247539 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dce6c4759af41e11de4257d1a6cf772834b0b952bed9a56a1b5fdb183c03dc55\": container with ID starting with dce6c4759af41e11de4257d1a6cf772834b0b952bed9a56a1b5fdb183c03dc55 not found: ID does not exist" containerID="dce6c4759af41e11de4257d1a6cf772834b0b952bed9a56a1b5fdb183c03dc55" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.247653 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dce6c4759af41e11de4257d1a6cf772834b0b952bed9a56a1b5fdb183c03dc55"} err="failed to get container status \"dce6c4759af41e11de4257d1a6cf772834b0b952bed9a56a1b5fdb183c03dc55\": rpc error: code = NotFound desc = could not find container \"dce6c4759af41e11de4257d1a6cf772834b0b952bed9a56a1b5fdb183c03dc55\": container with ID starting with dce6c4759af41e11de4257d1a6cf772834b0b952bed9a56a1b5fdb183c03dc55 not found: ID does not exist" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.324494 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kxwz\" (UniqueName: \"kubernetes.io/projected/6c1054e6-3738-4f8c-b7a0-6e52d922b623-kube-api-access-4kxwz\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.324545 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.324568 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.324588 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c1054e6-3738-4f8c-b7a0-6e52d922b623-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.324606 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c1054e6-3738-4f8c-b7a0-6e52d922b623-config\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.555430 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-64f784f9d7-sd44p"] Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.562056 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-64f784f9d7-sd44p"] Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865262 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-74869b7654-zrjcg"] Dec 02 11:54:57 crc kubenswrapper[4721]: E1202 11:54:57.865515 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dddd92c-299e-49fa-8c1b-e68c31f2a743" containerName="registry-server" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865527 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dddd92c-299e-49fa-8c1b-e68c31f2a743" containerName="registry-server" Dec 02 11:54:57 crc kubenswrapper[4721]: E1202 11:54:57.865539 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c1054e6-3738-4f8c-b7a0-6e52d922b623" containerName="controller-manager" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865545 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c1054e6-3738-4f8c-b7a0-6e52d922b623" containerName="controller-manager" Dec 02 11:54:57 crc kubenswrapper[4721]: E1202 11:54:57.865554 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dddd92c-299e-49fa-8c1b-e68c31f2a743" containerName="extract-utilities" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865560 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dddd92c-299e-49fa-8c1b-e68c31f2a743" containerName="extract-utilities" Dec 02 11:54:57 crc kubenswrapper[4721]: E1202 11:54:57.865568 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5ee83ff-fcfc-4318-8968-e79a68e1173f" containerName="route-controller-manager" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865573 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5ee83ff-fcfc-4318-8968-e79a68e1173f" containerName="route-controller-manager" Dec 02 11:54:57 crc kubenswrapper[4721]: E1202 11:54:57.865583 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" containerName="registry-server" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865589 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" containerName="registry-server" Dec 02 11:54:57 crc kubenswrapper[4721]: E1202 11:54:57.865600 4721 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" containerName="extract-utilities" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865605 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" containerName="extract-utilities" Dec 02 11:54:57 crc kubenswrapper[4721]: E1202 11:54:57.865612 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" containerName="extract-content" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865619 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" containerName="extract-content" Dec 02 11:54:57 crc kubenswrapper[4721]: E1202 11:54:57.865626 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9610e93f-3f1c-4f61-a171-2e9739620178" containerName="registry-server" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865631 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="9610e93f-3f1c-4f61-a171-2e9739620178" containerName="registry-server" Dec 02 11:54:57 crc kubenswrapper[4721]: E1202 11:54:57.865639 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9610e93f-3f1c-4f61-a171-2e9739620178" containerName="extract-content" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865644 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="9610e93f-3f1c-4f61-a171-2e9739620178" containerName="extract-content" Dec 02 11:54:57 crc kubenswrapper[4721]: E1202 11:54:57.865653 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9610e93f-3f1c-4f61-a171-2e9739620178" containerName="extract-utilities" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865659 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="9610e93f-3f1c-4f61-a171-2e9739620178" containerName="extract-utilities" Dec 02 11:54:57 crc kubenswrapper[4721]: E1202 11:54:57.865669 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dddd92c-299e-49fa-8c1b-e68c31f2a743" containerName="extract-content" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865674 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dddd92c-299e-49fa-8c1b-e68c31f2a743" containerName="extract-content" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865750 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5ee83ff-fcfc-4318-8968-e79a68e1173f" containerName="route-controller-manager" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865761 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="9610e93f-3f1c-4f61-a171-2e9739620178" containerName="registry-server" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865770 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1fcc52b-0fe2-4f76-9f6c-3cef8b7f183d" containerName="registry-server" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865778 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c1054e6-3738-4f8c-b7a0-6e52d922b623" containerName="controller-manager" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.865786 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dddd92c-299e-49fa-8c1b-e68c31f2a743" containerName="registry-server" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.866101 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.868605 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.869046 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.869783 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc"] Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.870738 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.869893 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.869933 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.869973 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.870017 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.876456 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.876835 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.876918 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.877001 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.877188 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.877423 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.880010 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.885058 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-74869b7654-zrjcg"] Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.888378 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc"] Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.931180 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7vks\" (UniqueName: 
\"kubernetes.io/projected/8d56ef08-7afd-4684-98fb-8e8027b44028-kube-api-access-x7vks\") pod \"route-controller-manager-749fbbcd54-qcldc\" (UID: \"8d56ef08-7afd-4684-98fb-8e8027b44028\") " pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.931235 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cea69a68-9029-4555-b808-2c74dbbdcd6f-serving-cert\") pod \"controller-manager-74869b7654-zrjcg\" (UID: \"cea69a68-9029-4555-b808-2c74dbbdcd6f\") " pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.931258 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d56ef08-7afd-4684-98fb-8e8027b44028-config\") pod \"route-controller-manager-749fbbcd54-qcldc\" (UID: \"8d56ef08-7afd-4684-98fb-8e8027b44028\") " pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.931273 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lj5zc\" (UniqueName: \"kubernetes.io/projected/cea69a68-9029-4555-b808-2c74dbbdcd6f-kube-api-access-lj5zc\") pod \"controller-manager-74869b7654-zrjcg\" (UID: \"cea69a68-9029-4555-b808-2c74dbbdcd6f\") " pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.931409 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cea69a68-9029-4555-b808-2c74dbbdcd6f-config\") pod \"controller-manager-74869b7654-zrjcg\" (UID: \"cea69a68-9029-4555-b808-2c74dbbdcd6f\") " pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.931438 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cea69a68-9029-4555-b808-2c74dbbdcd6f-proxy-ca-bundles\") pod \"controller-manager-74869b7654-zrjcg\" (UID: \"cea69a68-9029-4555-b808-2c74dbbdcd6f\") " pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.931510 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cea69a68-9029-4555-b808-2c74dbbdcd6f-client-ca\") pod \"controller-manager-74869b7654-zrjcg\" (UID: \"cea69a68-9029-4555-b808-2c74dbbdcd6f\") " pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.931554 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8d56ef08-7afd-4684-98fb-8e8027b44028-client-ca\") pod \"route-controller-manager-749fbbcd54-qcldc\" (UID: \"8d56ef08-7afd-4684-98fb-8e8027b44028\") " pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:57 crc kubenswrapper[4721]: I1202 11:54:57.931570 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8d56ef08-7afd-4684-98fb-8e8027b44028-serving-cert\") pod \"route-controller-manager-749fbbcd54-qcldc\" (UID: \"8d56ef08-7afd-4684-98fb-8e8027b44028\") " pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.032650 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7vks\" (UniqueName: \"kubernetes.io/projected/8d56ef08-7afd-4684-98fb-8e8027b44028-kube-api-access-x7vks\") pod \"route-controller-manager-749fbbcd54-qcldc\" (UID: \"8d56ef08-7afd-4684-98fb-8e8027b44028\") " pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.033010 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cea69a68-9029-4555-b808-2c74dbbdcd6f-serving-cert\") pod \"controller-manager-74869b7654-zrjcg\" (UID: \"cea69a68-9029-4555-b808-2c74dbbdcd6f\") " pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.033046 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d56ef08-7afd-4684-98fb-8e8027b44028-config\") pod \"route-controller-manager-749fbbcd54-qcldc\" (UID: \"8d56ef08-7afd-4684-98fb-8e8027b44028\") " pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.033064 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lj5zc\" (UniqueName: \"kubernetes.io/projected/cea69a68-9029-4555-b808-2c74dbbdcd6f-kube-api-access-lj5zc\") pod \"controller-manager-74869b7654-zrjcg\" (UID: \"cea69a68-9029-4555-b808-2c74dbbdcd6f\") " pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.033097 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cea69a68-9029-4555-b808-2c74dbbdcd6f-config\") pod \"controller-manager-74869b7654-zrjcg\" (UID: \"cea69a68-9029-4555-b808-2c74dbbdcd6f\") " pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.033113 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cea69a68-9029-4555-b808-2c74dbbdcd6f-proxy-ca-bundles\") pod \"controller-manager-74869b7654-zrjcg\" (UID: \"cea69a68-9029-4555-b808-2c74dbbdcd6f\") " pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.033141 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cea69a68-9029-4555-b808-2c74dbbdcd6f-client-ca\") pod \"controller-manager-74869b7654-zrjcg\" (UID: \"cea69a68-9029-4555-b808-2c74dbbdcd6f\") " pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.033165 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8d56ef08-7afd-4684-98fb-8e8027b44028-client-ca\") pod \"route-controller-manager-749fbbcd54-qcldc\" (UID: 
\"8d56ef08-7afd-4684-98fb-8e8027b44028\") " pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.033182 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d56ef08-7afd-4684-98fb-8e8027b44028-serving-cert\") pod \"route-controller-manager-749fbbcd54-qcldc\" (UID: \"8d56ef08-7afd-4684-98fb-8e8027b44028\") " pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.034657 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8d56ef08-7afd-4684-98fb-8e8027b44028-client-ca\") pod \"route-controller-manager-749fbbcd54-qcldc\" (UID: \"8d56ef08-7afd-4684-98fb-8e8027b44028\") " pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.034792 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d56ef08-7afd-4684-98fb-8e8027b44028-config\") pod \"route-controller-manager-749fbbcd54-qcldc\" (UID: \"8d56ef08-7afd-4684-98fb-8e8027b44028\") " pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.035444 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cea69a68-9029-4555-b808-2c74dbbdcd6f-config\") pod \"controller-manager-74869b7654-zrjcg\" (UID: \"cea69a68-9029-4555-b808-2c74dbbdcd6f\") " pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.035574 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cea69a68-9029-4555-b808-2c74dbbdcd6f-proxy-ca-bundles\") pod \"controller-manager-74869b7654-zrjcg\" (UID: \"cea69a68-9029-4555-b808-2c74dbbdcd6f\") " pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.037818 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cea69a68-9029-4555-b808-2c74dbbdcd6f-serving-cert\") pod \"controller-manager-74869b7654-zrjcg\" (UID: \"cea69a68-9029-4555-b808-2c74dbbdcd6f\") " pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.044485 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cea69a68-9029-4555-b808-2c74dbbdcd6f-client-ca\") pod \"controller-manager-74869b7654-zrjcg\" (UID: \"cea69a68-9029-4555-b808-2c74dbbdcd6f\") " pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.045888 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d56ef08-7afd-4684-98fb-8e8027b44028-serving-cert\") pod \"route-controller-manager-749fbbcd54-qcldc\" (UID: \"8d56ef08-7afd-4684-98fb-8e8027b44028\") " pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.053091 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-lj5zc\" (UniqueName: \"kubernetes.io/projected/cea69a68-9029-4555-b808-2c74dbbdcd6f-kube-api-access-lj5zc\") pod \"controller-manager-74869b7654-zrjcg\" (UID: \"cea69a68-9029-4555-b808-2c74dbbdcd6f\") " pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.054378 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7vks\" (UniqueName: \"kubernetes.io/projected/8d56ef08-7afd-4684-98fb-8e8027b44028-kube-api-access-x7vks\") pod \"route-controller-manager-749fbbcd54-qcldc\" (UID: \"8d56ef08-7afd-4684-98fb-8e8027b44028\") " pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.200374 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.209650 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.582929 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-74869b7654-zrjcg"] Dec 02 11:54:58 crc kubenswrapper[4721]: W1202 11:54:58.591880 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcea69a68_9029_4555_b808_2c74dbbdcd6f.slice/crio-a3124b773a6dd6121a52a1d27428a547af3c4770a4652e2d113f02aa9383925f WatchSource:0}: Error finding container a3124b773a6dd6121a52a1d27428a547af3c4770a4652e2d113f02aa9383925f: Status 404 returned error can't find the container with id a3124b773a6dd6121a52a1d27428a547af3c4770a4652e2d113f02aa9383925f Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.631201 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc"] Dec 02 11:54:58 crc kubenswrapper[4721]: W1202 11:54:58.636703 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d56ef08_7afd_4684_98fb_8e8027b44028.slice/crio-2f15d4cbfbfc23ea1589dd4fb95b5f1e202c05f74d4bd7259626994b01e1c518 WatchSource:0}: Error finding container 2f15d4cbfbfc23ea1589dd4fb95b5f1e202c05f74d4bd7259626994b01e1c518: Status 404 returned error can't find the container with id 2f15d4cbfbfc23ea1589dd4fb95b5f1e202c05f74d4bd7259626994b01e1c518 Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.846484 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c1054e6-3738-4f8c-b7a0-6e52d922b623" path="/var/lib/kubelet/pods/6c1054e6-3738-4f8c-b7a0-6e52d922b623/volumes" Dec 02 11:54:58 crc kubenswrapper[4721]: I1202 11:54:58.848085 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5ee83ff-fcfc-4318-8968-e79a68e1173f" path="/var/lib/kubelet/pods/a5ee83ff-fcfc-4318-8968-e79a68e1173f/volumes" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.053986 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" podUID="9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" containerName="oauth-openshift" containerID="cri-o://16c8b209eaf4383bf800c7d2c1687b4a3ebc6e04e36cfbedd9b318f510388e09" 
gracePeriod=15 Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.231905 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" event={"ID":"8d56ef08-7afd-4684-98fb-8e8027b44028","Type":"ContainerStarted","Data":"d002691907fe4cc14470394b7bf9334528e5617e824a2d3cb1c0f9b254770d66"} Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.232182 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" event={"ID":"8d56ef08-7afd-4684-98fb-8e8027b44028","Type":"ContainerStarted","Data":"2f15d4cbfbfc23ea1589dd4fb95b5f1e202c05f74d4bd7259626994b01e1c518"} Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.232197 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.233852 4721 generic.go:334] "Generic (PLEG): container finished" podID="9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" containerID="16c8b209eaf4383bf800c7d2c1687b4a3ebc6e04e36cfbedd9b318f510388e09" exitCode=0 Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.233891 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" event={"ID":"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7","Type":"ContainerDied","Data":"16c8b209eaf4383bf800c7d2c1687b4a3ebc6e04e36cfbedd9b318f510388e09"} Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.235421 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" event={"ID":"cea69a68-9029-4555-b808-2c74dbbdcd6f","Type":"ContainerStarted","Data":"5333eab3631e4059f1a28dadde054ccacfe8febe5ea2bcecc2cd86ef0f869641"} Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.235448 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" event={"ID":"cea69a68-9029-4555-b808-2c74dbbdcd6f","Type":"ContainerStarted","Data":"a3124b773a6dd6121a52a1d27428a547af3c4770a4652e2d113f02aa9383925f"} Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.235798 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.243174 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.250063 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" podStartSLOduration=3.250047976 podStartE2EDuration="3.250047976s" podCreationTimestamp="2025-12-02 11:54:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:54:59.247253601 +0000 UTC m=+228.985803950" watchObservedRunningTime="2025-12-02 11:54:59.250047976 +0000 UTC m=+228.988598325" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.274398 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-74869b7654-zrjcg" podStartSLOduration=3.274376983 podStartE2EDuration="3.274376983s" podCreationTimestamp="2025-12-02 11:54:56 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:54:59.271455964 +0000 UTC m=+229.010006313" watchObservedRunningTime="2025-12-02 11:54:59.274376983 +0000 UTC m=+229.012927332" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.291956 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-749fbbcd54-qcldc" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.432870 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.549006 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-idp-0-file-data\") pod \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.549057 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-login\") pod \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.549111 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-trusted-ca-bundle\") pod \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.549130 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-provider-selection\") pod \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.549155 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-session\") pod \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.549172 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-router-certs\") pod \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.549186 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-serving-cert\") pod \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.549211 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-service-ca\") pod \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.549235 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6wkx\" (UniqueName: \"kubernetes.io/projected/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-kube-api-access-l6wkx\") pod \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.549254 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-ocp-branding-template\") pod \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.549281 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-audit-dir\") pod \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.549307 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-cliconfig\") pod \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.549322 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-audit-policies\") pod \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.549361 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-error\") pod \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\" (UID: \"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7\") " Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.550325 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" (UID: "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.550820 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" (UID: "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.550854 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" (UID: "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.550883 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" (UID: "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.551202 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" (UID: "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.554499 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" (UID: "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.554741 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" (UID: "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.555439 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" (UID: "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.555712 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" (UID: "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.556059 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" (UID: "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.556120 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" (UID: "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.559755 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" (UID: "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.559965 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" (UID: "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.560329 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-kube-api-access-l6wkx" (OuterVolumeSpecName: "kube-api-access-l6wkx") pod "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" (UID: "9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7"). InnerVolumeSpecName "kube-api-access-l6wkx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.651086 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.651124 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6wkx\" (UniqueName: \"kubernetes.io/projected/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-kube-api-access-l6wkx\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.651134 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.651144 4721 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.651153 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.651164 4721 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.651173 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.651182 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.651192 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.651201 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.651210 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.651222 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.651232 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 02 11:54:59 crc kubenswrapper[4721]: I1202 11:54:59.651243 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 11:55:00 crc kubenswrapper[4721]: I1202 11:55:00.241677 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" Dec 02 11:55:00 crc kubenswrapper[4721]: I1202 11:55:00.244629 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-wb9r7" event={"ID":"9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7","Type":"ContainerDied","Data":"55249d29ce7bb50c0d27a31f902d2db842411af786e1d061bbf888a22f19e6b8"} Dec 02 11:55:00 crc kubenswrapper[4721]: I1202 11:55:00.244729 4721 scope.go:117] "RemoveContainer" containerID="16c8b209eaf4383bf800c7d2c1687b4a3ebc6e04e36cfbedd9b318f510388e09" Dec 02 11:55:00 crc kubenswrapper[4721]: I1202 11:55:00.266377 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wb9r7"] Dec 02 11:55:00 crc kubenswrapper[4721]: I1202 11:55:00.269447 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-wb9r7"] Dec 02 11:55:00 crc kubenswrapper[4721]: I1202 11:55:00.845844 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" path="/var/lib/kubelet/pods/9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7/volumes" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.871488 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46"] Dec 02 11:55:05 crc kubenswrapper[4721]: E1202 11:55:05.872429 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" containerName="oauth-openshift" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.872442 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" containerName="oauth-openshift" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.872540 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d09f08e-47a3-4bd1-9ec6-e3d1f96a7fe7" containerName="oauth-openshift" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.872911 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.875096 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.875681 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.875752 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.876731 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.876879 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.877231 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.877261 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.877386 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.877448 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.877883 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.878053 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.878289 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.886459 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.897308 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46"] Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.899502 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.901817 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.925236 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " 
pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.925319 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-audit-policies\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.925381 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-router-certs\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.925438 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-session\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.925477 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-audit-dir\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.925507 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.925533 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.925564 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-user-template-error\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.925613 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-service-ca\") pod 
\"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.925646 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.925671 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jv45t\" (UniqueName: \"kubernetes.io/projected/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-kube-api-access-jv45t\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.925698 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-cliconfig\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.925753 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-user-template-login\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:05 crc kubenswrapper[4721]: I1202 11:55:05.925798 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-serving-cert\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.026891 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-serving-cert\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.026945 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.026973 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-audit-policies\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.026993 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-router-certs\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.027023 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-session\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.027047 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-audit-dir\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.027068 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.027102 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.027124 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-user-template-error\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.027148 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-service-ca\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.027172 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.027189 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jv45t\" (UniqueName: \"kubernetes.io/projected/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-kube-api-access-jv45t\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.027212 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-cliconfig\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.027233 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-user-template-login\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.028375 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-audit-dir\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.028607 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-audit-policies\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.029193 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-cliconfig\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.029330 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.029925 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-service-ca\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: 
\"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.034144 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-user-template-error\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.034875 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.035428 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-user-template-login\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.035778 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.036232 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-session\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.036559 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-router-certs\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.037752 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-serving-cert\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.039945 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " 
pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.047644 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jv45t\" (UniqueName: \"kubernetes.io/projected/e73f2591-3f5f-48ba-9bb2-fc5cb5e90893-kube-api-access-jv45t\") pod \"oauth-openshift-bc9f7ddc4-tnr46\" (UID: \"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893\") " pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.190907 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.668383 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46"] Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.945888 4721 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.946587 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251" gracePeriod=15 Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.946734 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957" gracePeriod=15 Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.946779 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f" gracePeriod=15 Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.946841 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde" gracePeriod=15 Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.946873 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0" gracePeriod=15 Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.949731 4721 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 11:55:06 crc kubenswrapper[4721]: E1202 11:55:06.950117 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.950143 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 02 11:55:06 crc kubenswrapper[4721]: E1202 11:55:06.950171 4721 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.950187 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 11:55:06 crc kubenswrapper[4721]: E1202 11:55:06.950217 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.950235 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 11:55:06 crc kubenswrapper[4721]: E1202 11:55:06.950268 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.950285 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 11:55:06 crc kubenswrapper[4721]: E1202 11:55:06.950310 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.950327 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 11:55:06 crc kubenswrapper[4721]: E1202 11:55:06.955895 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.955934 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.956158 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.956175 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.956189 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.956201 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.956213 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.956231 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 11:55:06 crc kubenswrapper[4721]: E1202 11:55:06.956368 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.956380 4721 
state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.957813 4721 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.958911 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:06 crc kubenswrapper[4721]: I1202 11:55:06.962458 4721 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.040999 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.041042 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.041067 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.041087 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.041189 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.041240 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.041368 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.041397 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.143213 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.143282 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.143323 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.143376 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.143403 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.143410 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.143480 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.143475 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.143475 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.143423 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.143524 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.143534 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.144230 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.144297 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.144389 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.144461 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.297378 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.299048 4721 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.299759 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957" exitCode=0 Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.299798 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f" exitCode=0 Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.299806 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde" exitCode=0 Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.299817 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0" exitCode=2 Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.299894 4721 scope.go:117] "RemoveContainer" containerID="7b9ddbd3a876fe714cf74e2a03588d60c192ae864ea16194e8899919301a7a40" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.302259 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" event={"ID":"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893","Type":"ContainerStarted","Data":"ecc419999271df2919bd6d18ae1729db5ac7b9b34f10f610efa90bd2b19a3b3b"} Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.302311 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" event={"ID":"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893","Type":"ContainerStarted","Data":"41a396ab6a3edc5b2a8acbc8eb58c447378a7062a509e0f4da68abfd16af94fb"} Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.302597 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.303100 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.304289 4721 generic.go:334] "Generic (PLEG): container finished" podID="855a31ab-b04f-4907-be30-115efd42a84e" containerID="0ec70084d6befc4cbf2d97a3805cd46931b6d600b74e64701bf74bba019aa92f" exitCode=0 Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.304351 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"855a31ab-b04f-4907-be30-115efd42a84e","Type":"ContainerDied","Data":"0ec70084d6befc4cbf2d97a3805cd46931b6d600b74e64701bf74bba019aa92f"} Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.304771 4721 status_manager.go:851] "Failed to get status for pod" podUID="855a31ab-b04f-4907-be30-115efd42a84e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 
38.102.83.169:6443: connect: connection refused" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.305057 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.346324 4721 patch_prober.go:28] interesting pod/oauth-openshift-bc9f7ddc4-tnr46 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.61:6443/healthz\": read tcp 10.217.0.2:40456->10.217.0.61:6443: read: connection reset by peer" start-of-body= Dec 02 11:55:07 crc kubenswrapper[4721]: I1202 11:55:07.346463 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.61:6443/healthz\": read tcp 10.217.0.2:40456->10.217.0.61:6443: read: connection reset by peer" Dec 02 11:55:07 crc kubenswrapper[4721]: E1202 11:55:07.915529 4721 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.169:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" volumeName="registry-storage" Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.314265 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-bc9f7ddc4-tnr46_e73f2591-3f5f-48ba-9bb2-fc5cb5e90893/oauth-openshift/0.log" Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.314314 4721 generic.go:334] "Generic (PLEG): container finished" podID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" containerID="ecc419999271df2919bd6d18ae1729db5ac7b9b34f10f610efa90bd2b19a3b3b" exitCode=255 Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.314390 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" event={"ID":"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893","Type":"ContainerDied","Data":"ecc419999271df2919bd6d18ae1729db5ac7b9b34f10f610efa90bd2b19a3b3b"} Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.315411 4721 scope.go:117] "RemoveContainer" containerID="ecc419999271df2919bd6d18ae1729db5ac7b9b34f10f610efa90bd2b19a3b3b" Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.315673 4721 status_manager.go:851] "Failed to get status for pod" podUID="855a31ab-b04f-4907-be30-115efd42a84e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.316197 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 
38.102.83.169:6443: connect: connection refused" Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.319161 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.732599 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.733414 4721 status_manager.go:851] "Failed to get status for pod" podUID="855a31ab-b04f-4907-be30-115efd42a84e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.733589 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.867464 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/855a31ab-b04f-4907-be30-115efd42a84e-kubelet-dir\") pod \"855a31ab-b04f-4907-be30-115efd42a84e\" (UID: \"855a31ab-b04f-4907-be30-115efd42a84e\") " Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.867540 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/855a31ab-b04f-4907-be30-115efd42a84e-kube-api-access\") pod \"855a31ab-b04f-4907-be30-115efd42a84e\" (UID: \"855a31ab-b04f-4907-be30-115efd42a84e\") " Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.867576 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/855a31ab-b04f-4907-be30-115efd42a84e-var-lock\") pod \"855a31ab-b04f-4907-be30-115efd42a84e\" (UID: \"855a31ab-b04f-4907-be30-115efd42a84e\") " Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.867529 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/855a31ab-b04f-4907-be30-115efd42a84e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "855a31ab-b04f-4907-be30-115efd42a84e" (UID: "855a31ab-b04f-4907-be30-115efd42a84e"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.867739 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/855a31ab-b04f-4907-be30-115efd42a84e-var-lock" (OuterVolumeSpecName: "var-lock") pod "855a31ab-b04f-4907-be30-115efd42a84e" (UID: "855a31ab-b04f-4907-be30-115efd42a84e"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.867859 4721 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/855a31ab-b04f-4907-be30-115efd42a84e-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.867871 4721 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/855a31ab-b04f-4907-be30-115efd42a84e-var-lock\") on node \"crc\" DevicePath \"\"" Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.875214 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/855a31ab-b04f-4907-be30-115efd42a84e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "855a31ab-b04f-4907-be30-115efd42a84e" (UID: "855a31ab-b04f-4907-be30-115efd42a84e"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:55:08 crc kubenswrapper[4721]: I1202 11:55:08.969121 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/855a31ab-b04f-4907-be30-115efd42a84e-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.275961 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.277229 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.277847 4721 status_manager.go:851] "Failed to get status for pod" podUID="855a31ab-b04f-4907-be30-115efd42a84e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.278318 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.278688 4721 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.328411 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.328409 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"855a31ab-b04f-4907-be30-115efd42a84e","Type":"ContainerDied","Data":"57a9571429eeaf66cc9387c7d2880c276e210d6cccba4e63624130a247621d9e"} Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.328740 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57a9571429eeaf66cc9387c7d2880c276e210d6cccba4e63624130a247621d9e" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.331282 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.332111 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251" exitCode=0 Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.332194 4721 scope.go:117] "RemoveContainer" containerID="72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.332203 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.333942 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-bc9f7ddc4-tnr46_e73f2591-3f5f-48ba-9bb2-fc5cb5e90893/oauth-openshift/1.log" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.334477 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-bc9f7ddc4-tnr46_e73f2591-3f5f-48ba-9bb2-fc5cb5e90893/oauth-openshift/0.log" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.334513 4721 generic.go:334] "Generic (PLEG): container finished" podID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" containerID="ff06c1f8edde6a34a46911603aa84881a958676d48c8f7944672883cf3419f8d" exitCode=255 Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.334532 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" event={"ID":"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893","Type":"ContainerDied","Data":"ff06c1f8edde6a34a46911603aa84881a958676d48c8f7944672883cf3419f8d"} Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.334926 4721 scope.go:117] "RemoveContainer" containerID="ff06c1f8edde6a34a46911603aa84881a958676d48c8f7944672883cf3419f8d" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.335224 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:09 crc kubenswrapper[4721]: E1202 11:55:09.335293 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-bc9f7ddc4-tnr46_openshift-authentication(e73f2591-3f5f-48ba-9bb2-fc5cb5e90893)\"" 
pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.335611 4721 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.335856 4721 status_manager.go:851] "Failed to get status for pod" podUID="855a31ab-b04f-4907-be30-115efd42a84e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.343782 4721 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.343934 4721 status_manager.go:851] "Failed to get status for pod" podUID="855a31ab-b04f-4907-be30-115efd42a84e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.344081 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.347310 4721 scope.go:117] "RemoveContainer" containerID="41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.360961 4721 scope.go:117] "RemoveContainer" containerID="c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.373531 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.373599 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.373646 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.373647 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.373696 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.373799 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.374075 4721 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.374095 4721 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.374107 4721 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.398271 4721 scope.go:117] "RemoveContainer" containerID="94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.408936 4721 scope.go:117] "RemoveContainer" containerID="7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.420620 4721 scope.go:117] "RemoveContainer" containerID="8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.436198 4721 scope.go:117] "RemoveContainer" containerID="72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957" Dec 02 11:55:09 crc kubenswrapper[4721]: E1202 11:55:09.436646 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\": container with ID starting with 72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957 not found: ID does not exist" containerID="72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.436684 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957"} err="failed to get container status \"72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\": rpc error: code = NotFound desc = could not find container \"72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957\": container with ID 
starting with 72a237b5ba186310710c1efe68672a366deb5c23102d204670c660e00a699957 not found: ID does not exist" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.436728 4721 scope.go:117] "RemoveContainer" containerID="41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f" Dec 02 11:55:09 crc kubenswrapper[4721]: E1202 11:55:09.437122 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\": container with ID starting with 41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f not found: ID does not exist" containerID="41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.437157 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f"} err="failed to get container status \"41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\": rpc error: code = NotFound desc = could not find container \"41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f\": container with ID starting with 41f53b6923b4f4c41b986a64443eecb4d161ac9bfbe070a1327facda088b771f not found: ID does not exist" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.437176 4721 scope.go:117] "RemoveContainer" containerID="c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde" Dec 02 11:55:09 crc kubenswrapper[4721]: E1202 11:55:09.437501 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\": container with ID starting with c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde not found: ID does not exist" containerID="c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.437528 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde"} err="failed to get container status \"c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\": rpc error: code = NotFound desc = could not find container \"c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde\": container with ID starting with c9d57ae3d79c4f29e2b169b2f54075558ea089d82a85426b360d1a7b1a4c4cde not found: ID does not exist" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.437547 4721 scope.go:117] "RemoveContainer" containerID="94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0" Dec 02 11:55:09 crc kubenswrapper[4721]: E1202 11:55:09.437835 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\": container with ID starting with 94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0 not found: ID does not exist" containerID="94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.437859 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0"} err="failed to get container status 
\"94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\": rpc error: code = NotFound desc = could not find container \"94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0\": container with ID starting with 94cf53535ef1151c0a351ad37587ba487cf370f1f6e435f8774629c836bbabe0 not found: ID does not exist" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.437874 4721 scope.go:117] "RemoveContainer" containerID="7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251" Dec 02 11:55:09 crc kubenswrapper[4721]: E1202 11:55:09.438245 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\": container with ID starting with 7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251 not found: ID does not exist" containerID="7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.438291 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251"} err="failed to get container status \"7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\": rpc error: code = NotFound desc = could not find container \"7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251\": container with ID starting with 7bb38bd11be4c82cdfef115bea2574d990f6825744500e505ffffe4e146cd251 not found: ID does not exist" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.438320 4721 scope.go:117] "RemoveContainer" containerID="8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f" Dec 02 11:55:09 crc kubenswrapper[4721]: E1202 11:55:09.438622 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\": container with ID starting with 8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f not found: ID does not exist" containerID="8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.438657 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f"} err="failed to get container status \"8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\": rpc error: code = NotFound desc = could not find container \"8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f\": container with ID starting with 8626f64cc927a0e0b926f5ff2d02a181625cea931800f0ee3b9026e3633b0a4f not found: ID does not exist" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.438678 4721 scope.go:117] "RemoveContainer" containerID="ecc419999271df2919bd6d18ae1729db5ac7b9b34f10f610efa90bd2b19a3b3b" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.645372 4721 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.645576 4721 status_manager.go:851] "Failed to get status for pod" podUID="855a31ab-b04f-4907-be30-115efd42a84e" 
pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:09 crc kubenswrapper[4721]: I1202 11:55:09.645738 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:10 crc kubenswrapper[4721]: I1202 11:55:10.340430 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-bc9f7ddc4-tnr46_e73f2591-3f5f-48ba-9bb2-fc5cb5e90893/oauth-openshift/1.log" Dec 02 11:55:10 crc kubenswrapper[4721]: I1202 11:55:10.341333 4721 scope.go:117] "RemoveContainer" containerID="ff06c1f8edde6a34a46911603aa84881a958676d48c8f7944672883cf3419f8d" Dec 02 11:55:10 crc kubenswrapper[4721]: I1202 11:55:10.341479 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:10 crc kubenswrapper[4721]: E1202 11:55:10.341511 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-bc9f7ddc4-tnr46_openshift-authentication(e73f2591-3f5f-48ba-9bb2-fc5cb5e90893)\"" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" Dec 02 11:55:10 crc kubenswrapper[4721]: I1202 11:55:10.341917 4721 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:10 crc kubenswrapper[4721]: I1202 11:55:10.342134 4721 status_manager.go:851] "Failed to get status for pod" podUID="855a31ab-b04f-4907-be30-115efd42a84e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:10 crc kubenswrapper[4721]: I1202 11:55:10.842064 4721 status_manager.go:851] "Failed to get status for pod" podUID="855a31ab-b04f-4907-be30-115efd42a84e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:10 crc kubenswrapper[4721]: I1202 11:55:10.842468 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:10 crc kubenswrapper[4721]: 
I1202 11:55:10.842777 4721 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:10 crc kubenswrapper[4721]: I1202 11:55:10.845158 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 02 11:55:11 crc kubenswrapper[4721]: E1202 11:55:11.993123 4721 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.169:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:11 crc kubenswrapper[4721]: I1202 11:55:11.993533 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:12 crc kubenswrapper[4721]: W1202 11:55:12.009892 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-a5b994bcbbb196d442a2171f6a6989d28d791053b9b47f19a0c464ad67ab7e7f WatchSource:0}: Error finding container a5b994bcbbb196d442a2171f6a6989d28d791053b9b47f19a0c464ad67ab7e7f: Status 404 returned error can't find the container with id a5b994bcbbb196d442a2171f6a6989d28d791053b9b47f19a0c464ad67ab7e7f Dec 02 11:55:12 crc kubenswrapper[4721]: E1202 11:55:12.010624 4721 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/events\": dial tcp 38.102.83.169:6443: connect: connection refused" event="&Event{ObjectMeta:{oauth-openshift-bc9f7ddc4-tnr46.187d63eef715d97d openshift-authentication 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-authentication,Name:oauth-openshift-bc9f7ddc4-tnr46,UID:e73f2591-3f5f-48ba-9bb2-fc5cb5e90893,APIVersion:v1,ResourceVersion:29547,FieldPath:spec.containers{oauth-openshift},},Reason:Started,Message:Started container oauth-openshift,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 11:55:06.997873021 +0000 UTC m=+236.736423370,LastTimestamp:2025-12-02 11:55:06.997873021 +0000 UTC m=+236.736423370,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 11:55:12 crc kubenswrapper[4721]: I1202 11:55:12.353600 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"2b530ab7014e4ee84125b7ac0708824485e1e0a15b371160883ef2c506925042"} Dec 02 11:55:12 crc kubenswrapper[4721]: I1202 11:55:12.354008 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"a5b994bcbbb196d442a2171f6a6989d28d791053b9b47f19a0c464ad67ab7e7f"} Dec 02 11:55:12 crc kubenswrapper[4721]: E1202 11:55:12.354539 4721 kubelet.go:1929] "Failed creating a mirror pod for" err="Post 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.169:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:12 crc kubenswrapper[4721]: I1202 11:55:12.354567 4721 status_manager.go:851] "Failed to get status for pod" podUID="855a31ab-b04f-4907-be30-115efd42a84e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:12 crc kubenswrapper[4721]: I1202 11:55:12.354885 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:14 crc kubenswrapper[4721]: E1202 11:55:14.755237 4721 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:14 crc kubenswrapper[4721]: E1202 11:55:14.756189 4721 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:14 crc kubenswrapper[4721]: E1202 11:55:14.756878 4721 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:14 crc kubenswrapper[4721]: E1202 11:55:14.757145 4721 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:14 crc kubenswrapper[4721]: E1202 11:55:14.757487 4721 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:14 crc kubenswrapper[4721]: I1202 11:55:14.757544 4721 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 02 11:55:14 crc kubenswrapper[4721]: E1202 11:55:14.757946 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.169:6443: connect: connection refused" interval="200ms" Dec 02 11:55:14 crc kubenswrapper[4721]: E1202 11:55:14.958939 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.169:6443: connect: connection refused" interval="400ms" Dec 02 11:55:15 crc kubenswrapper[4721]: E1202 11:55:15.360558 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.169:6443: connect: connection refused" interval="800ms" Dec 02 11:55:16 crc kubenswrapper[4721]: E1202 11:55:16.162247 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.169:6443: connect: connection refused" interval="1.6s" Dec 02 11:55:16 crc kubenswrapper[4721]: I1202 11:55:16.191685 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:16 crc kubenswrapper[4721]: I1202 11:55:16.191726 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:16 crc kubenswrapper[4721]: I1202 11:55:16.192243 4721 scope.go:117] "RemoveContainer" containerID="ff06c1f8edde6a34a46911603aa84881a958676d48c8f7944672883cf3419f8d" Dec 02 11:55:16 crc kubenswrapper[4721]: E1202 11:55:16.192434 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-bc9f7ddc4-tnr46_openshift-authentication(e73f2591-3f5f-48ba-9bb2-fc5cb5e90893)\"" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" Dec 02 11:55:17 crc kubenswrapper[4721]: E1202 11:55:17.763539 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.169:6443: connect: connection refused" interval="3.2s" Dec 02 11:55:20 crc kubenswrapper[4721]: I1202 11:55:20.013629 4721 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Liveness probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 02 11:55:20 crc kubenswrapper[4721]: I1202 11:55:20.014109 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 02 11:55:20 crc kubenswrapper[4721]: I1202 11:55:20.419544 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 02 11:55:20 crc kubenswrapper[4721]: I1202 11:55:20.419603 4721 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5" exitCode=1 Dec 02 11:55:20 crc kubenswrapper[4721]: I1202 11:55:20.419634 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5"} Dec 02 11:55:20 crc kubenswrapper[4721]: 
I1202 11:55:20.420073 4721 scope.go:117] "RemoveContainer" containerID="7017c03fe7e94e553a5ae19d5bd4b708512d57de5999d43240ff7b4ec35b9cb5" Dec 02 11:55:20 crc kubenswrapper[4721]: I1202 11:55:20.420638 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:20 crc kubenswrapper[4721]: I1202 11:55:20.421206 4721 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:20 crc kubenswrapper[4721]: I1202 11:55:20.422119 4721 status_manager.go:851] "Failed to get status for pod" podUID="855a31ab-b04f-4907-be30-115efd42a84e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:20 crc kubenswrapper[4721]: E1202 11:55:20.595655 4721 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/events\": dial tcp 38.102.83.169:6443: connect: connection refused" event="&Event{ObjectMeta:{oauth-openshift-bc9f7ddc4-tnr46.187d63eef715d97d openshift-authentication 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-authentication,Name:oauth-openshift-bc9f7ddc4-tnr46,UID:e73f2591-3f5f-48ba-9bb2-fc5cb5e90893,APIVersion:v1,ResourceVersion:29547,FieldPath:spec.containers{oauth-openshift},},Reason:Started,Message:Started container oauth-openshift,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 11:55:06.997873021 +0000 UTC m=+236.736423370,LastTimestamp:2025-12-02 11:55:06.997873021 +0000 UTC m=+236.736423370,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 11:55:20 crc kubenswrapper[4721]: I1202 11:55:20.843204 4721 status_manager.go:851] "Failed to get status for pod" podUID="855a31ab-b04f-4907-be30-115efd42a84e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:20 crc kubenswrapper[4721]: I1202 11:55:20.843563 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:20 crc kubenswrapper[4721]: I1202 11:55:20.844018 4721 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:20 crc kubenswrapper[4721]: E1202 11:55:20.965900 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.169:6443: connect: connection refused" interval="6.4s" Dec 02 11:55:21 crc kubenswrapper[4721]: I1202 11:55:21.429538 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 02 11:55:21 crc kubenswrapper[4721]: I1202 11:55:21.429588 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"89c8b25fbdee460c42fc3bebb1d435f122f767954791b69376ef0eaab6e2365e"} Dec 02 11:55:21 crc kubenswrapper[4721]: I1202 11:55:21.430670 4721 status_manager.go:851] "Failed to get status for pod" podUID="855a31ab-b04f-4907-be30-115efd42a84e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:21 crc kubenswrapper[4721]: I1202 11:55:21.431122 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:21 crc kubenswrapper[4721]: I1202 11:55:21.431620 4721 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:21 crc kubenswrapper[4721]: I1202 11:55:21.839040 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:21 crc kubenswrapper[4721]: I1202 11:55:21.840586 4721 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:21 crc kubenswrapper[4721]: I1202 11:55:21.840939 4721 status_manager.go:851] "Failed to get status for pod" podUID="855a31ab-b04f-4907-be30-115efd42a84e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:21 crc kubenswrapper[4721]: I1202 11:55:21.841152 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:21 crc kubenswrapper[4721]: I1202 11:55:21.857146 4721 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c27f266b-3b00-44bd-8872-0dd43d8bd22f" Dec 02 11:55:21 crc kubenswrapper[4721]: I1202 11:55:21.857183 4721 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c27f266b-3b00-44bd-8872-0dd43d8bd22f" Dec 02 11:55:21 crc kubenswrapper[4721]: E1202 11:55:21.857746 4721 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:21 crc kubenswrapper[4721]: I1202 11:55:21.858489 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:22 crc kubenswrapper[4721]: I1202 11:55:22.438017 4721 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="5676b22c5b7c904c23beb7fad2717774091ab850bcb3b0771b53ccc2fd347658" exitCode=0 Dec 02 11:55:22 crc kubenswrapper[4721]: I1202 11:55:22.438057 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"5676b22c5b7c904c23beb7fad2717774091ab850bcb3b0771b53ccc2fd347658"} Dec 02 11:55:22 crc kubenswrapper[4721]: I1202 11:55:22.438104 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"9fb8e6dae6676b812024198ca9394aabe6b5a4db06a317839a88cd0df05042f3"} Dec 02 11:55:22 crc kubenswrapper[4721]: I1202 11:55:22.438416 4721 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c27f266b-3b00-44bd-8872-0dd43d8bd22f" Dec 02 11:55:22 crc kubenswrapper[4721]: I1202 11:55:22.438432 4721 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c27f266b-3b00-44bd-8872-0dd43d8bd22f" Dec 02 11:55:22 crc kubenswrapper[4721]: E1202 11:55:22.438892 4721 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:22 crc kubenswrapper[4721]: I1202 11:55:22.438905 4721 status_manager.go:851] "Failed to get status for pod" podUID="855a31ab-b04f-4907-be30-115efd42a84e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:22 crc kubenswrapper[4721]: I1202 11:55:22.439404 4721 status_manager.go:851] "Failed to get status for pod" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-bc9f7ddc4-tnr46\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:22 crc kubenswrapper[4721]: I1202 11:55:22.439894 4721 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.169:6443: connect: connection refused" Dec 02 11:55:23 crc kubenswrapper[4721]: I1202 11:55:23.454224 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b6285c82cd2359153ae2f4bb3dcb3282e556163633907b6c7a699e5e207691c1"} Dec 02 11:55:23 crc kubenswrapper[4721]: I1202 11:55:23.454929 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b0bc9dd5b5d2720712f069b447e2c9c721ce465e8843d834af9b8456b69d525f"} Dec 02 11:55:23 crc kubenswrapper[4721]: I1202 11:55:23.454973 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"22362f39ebe8b763b505ad6604fc7287cd0597820d6dbbe767f13bcbe24d7396"} Dec 02 11:55:23 crc kubenswrapper[4721]: I1202 11:55:23.454985 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5b0faa2ae2612070a40b355b5e1dec92eedb9bf2dad2f3435a0bd631f50edef9"} Dec 02 11:55:24 crc kubenswrapper[4721]: I1202 11:55:24.493902 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"6fc755b88fb8e47460224d587c48de61240957a4eb750aebe0c4a2d813fecb5a"} Dec 02 11:55:24 crc kubenswrapper[4721]: I1202 11:55:24.494085 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:24 crc kubenswrapper[4721]: I1202 11:55:24.494191 4721 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c27f266b-3b00-44bd-8872-0dd43d8bd22f" Dec 02 11:55:24 crc kubenswrapper[4721]: I1202 11:55:24.494219 4721 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c27f266b-3b00-44bd-8872-0dd43d8bd22f" Dec 02 11:55:26 crc kubenswrapper[4721]: I1202 11:55:26.465461 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 11:55:26 crc kubenswrapper[4721]: I1202 11:55:26.859218 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:26 crc kubenswrapper[4721]: I1202 11:55:26.859268 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:26 crc kubenswrapper[4721]: I1202 11:55:26.865244 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:28 crc kubenswrapper[4721]: I1202 11:55:28.933269 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 11:55:28 crc kubenswrapper[4721]: I1202 11:55:28.939556 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 11:55:29 crc kubenswrapper[4721]: I1202 11:55:29.506961 4721 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:29 crc kubenswrapper[4721]: I1202 11:55:29.839614 4721 scope.go:117] "RemoveContainer" containerID="ff06c1f8edde6a34a46911603aa84881a958676d48c8f7944672883cf3419f8d" Dec 02 11:55:30 crc kubenswrapper[4721]: I1202 11:55:30.529708 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-bc9f7ddc4-tnr46_e73f2591-3f5f-48ba-9bb2-fc5cb5e90893/oauth-openshift/2.log" Dec 02 11:55:30 crc kubenswrapper[4721]: I1202 11:55:30.533311 4721 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-bc9f7ddc4-tnr46_e73f2591-3f5f-48ba-9bb2-fc5cb5e90893/oauth-openshift/1.log" Dec 02 11:55:30 crc kubenswrapper[4721]: I1202 11:55:30.533399 4721 generic.go:334] "Generic (PLEG): container finished" podID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" containerID="dd5cdd7fdee5174e89b9ac2244bfcec28086ab481db70e88c4102e2b1126e2f5" exitCode=255 Dec 02 11:55:30 crc kubenswrapper[4721]: I1202 11:55:30.533563 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" event={"ID":"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893","Type":"ContainerDied","Data":"dd5cdd7fdee5174e89b9ac2244bfcec28086ab481db70e88c4102e2b1126e2f5"} Dec 02 11:55:30 crc kubenswrapper[4721]: I1202 11:55:30.533839 4721 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c27f266b-3b00-44bd-8872-0dd43d8bd22f" Dec 02 11:55:30 crc kubenswrapper[4721]: I1202 11:55:30.533843 4721 scope.go:117] "RemoveContainer" containerID="ff06c1f8edde6a34a46911603aa84881a958676d48c8f7944672883cf3419f8d" Dec 02 11:55:30 crc kubenswrapper[4721]: I1202 11:55:30.533859 4721 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c27f266b-3b00-44bd-8872-0dd43d8bd22f" Dec 02 11:55:30 crc kubenswrapper[4721]: I1202 11:55:30.534418 4721 scope.go:117] "RemoveContainer" containerID="dd5cdd7fdee5174e89b9ac2244bfcec28086ab481db70e88c4102e2b1126e2f5" Dec 02 11:55:30 crc kubenswrapper[4721]: E1202 11:55:30.534796 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 20s restarting failed container=oauth-openshift pod=oauth-openshift-bc9f7ddc4-tnr46_openshift-authentication(e73f2591-3f5f-48ba-9bb2-fc5cb5e90893)\"" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" Dec 02 11:55:30 crc kubenswrapper[4721]: I1202 11:55:30.544883 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:30 crc kubenswrapper[4721]: I1202 11:55:30.875332 4721 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="cb47fb16-c9b3-41b7-91c0-7b615c51dc86" Dec 02 11:55:31 crc kubenswrapper[4721]: I1202 11:55:31.541718 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-bc9f7ddc4-tnr46_e73f2591-3f5f-48ba-9bb2-fc5cb5e90893/oauth-openshift/2.log" Dec 02 11:55:31 crc kubenswrapper[4721]: I1202 11:55:31.542214 4721 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c27f266b-3b00-44bd-8872-0dd43d8bd22f" Dec 02 11:55:31 crc kubenswrapper[4721]: I1202 11:55:31.542241 4721 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="c27f266b-3b00-44bd-8872-0dd43d8bd22f" Dec 02 11:55:31 crc kubenswrapper[4721]: I1202 11:55:31.546385 4721 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="cb47fb16-c9b3-41b7-91c0-7b615c51dc86" Dec 02 11:55:36 crc kubenswrapper[4721]: I1202 11:55:36.192066 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:36 crc kubenswrapper[4721]: I1202 11:55:36.192810 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:55:36 crc kubenswrapper[4721]: I1202 11:55:36.193605 4721 scope.go:117] "RemoveContainer" containerID="dd5cdd7fdee5174e89b9ac2244bfcec28086ab481db70e88c4102e2b1126e2f5" Dec 02 11:55:36 crc kubenswrapper[4721]: E1202 11:55:36.193967 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 20s restarting failed container=oauth-openshift pod=oauth-openshift-bc9f7ddc4-tnr46_openshift-authentication(e73f2591-3f5f-48ba-9bb2-fc5cb5e90893)\"" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" Dec 02 11:55:36 crc kubenswrapper[4721]: I1202 11:55:36.470988 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 11:55:39 crc kubenswrapper[4721]: I1202 11:55:39.242302 4721 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 02 11:55:40 crc kubenswrapper[4721]: I1202 11:55:40.258437 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 02 11:55:40 crc kubenswrapper[4721]: I1202 11:55:40.469521 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 02 11:55:40 crc kubenswrapper[4721]: I1202 11:55:40.496060 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 02 11:55:40 crc kubenswrapper[4721]: I1202 11:55:40.691710 4721 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 02 11:55:41 crc kubenswrapper[4721]: I1202 11:55:41.055146 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 02 11:55:41 crc kubenswrapper[4721]: I1202 11:55:41.085139 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 02 11:55:41 crc kubenswrapper[4721]: I1202 11:55:41.139120 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 02 11:55:41 crc kubenswrapper[4721]: I1202 11:55:41.173070 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 02 11:55:41 crc kubenswrapper[4721]: I1202 11:55:41.306174 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 02 11:55:41 crc kubenswrapper[4721]: I1202 11:55:41.367483 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 02 11:55:41 crc kubenswrapper[4721]: I1202 11:55:41.382393 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 02 11:55:41 crc kubenswrapper[4721]: I1202 11:55:41.388852 4721 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 02 11:55:41 crc kubenswrapper[4721]: I1202 11:55:41.390844 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 02 11:55:41 crc kubenswrapper[4721]: I1202 11:55:41.433717 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 02 11:55:41 crc kubenswrapper[4721]: I1202 11:55:41.656319 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 02 11:55:41 crc kubenswrapper[4721]: I1202 11:55:41.751657 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 02 11:55:41 crc kubenswrapper[4721]: I1202 11:55:41.771314 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 02 11:55:41 crc kubenswrapper[4721]: I1202 11:55:41.918005 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 02 11:55:41 crc kubenswrapper[4721]: I1202 11:55:41.999420 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 02 11:55:42 crc kubenswrapper[4721]: I1202 11:55:42.109748 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 02 11:55:42 crc kubenswrapper[4721]: I1202 11:55:42.149307 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 02 11:55:42 crc kubenswrapper[4721]: I1202 11:55:42.287666 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 02 11:55:42 crc kubenswrapper[4721]: I1202 11:55:42.357818 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 02 11:55:42 crc kubenswrapper[4721]: I1202 11:55:42.372144 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 02 11:55:42 crc kubenswrapper[4721]: I1202 11:55:42.478180 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 02 11:55:42 crc kubenswrapper[4721]: I1202 11:55:42.484554 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 02 11:55:42 crc kubenswrapper[4721]: I1202 11:55:42.531960 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 02 11:55:42 crc kubenswrapper[4721]: I1202 11:55:42.688645 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 02 11:55:42 crc kubenswrapper[4721]: I1202 11:55:42.726893 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 02 11:55:42 crc kubenswrapper[4721]: I1202 11:55:42.735371 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 02 11:55:42 crc kubenswrapper[4721]: I1202 11:55:42.838179 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 02 11:55:42 crc kubenswrapper[4721]: 
I1202 11:55:42.860655 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 02 11:55:42 crc kubenswrapper[4721]: I1202 11:55:42.906661 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 02 11:55:42 crc kubenswrapper[4721]: I1202 11:55:42.942254 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.041389 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.097769 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.291954 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.313531 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.343768 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.390848 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.449420 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.577540 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.577631 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.581754 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.624912 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.662057 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.673855 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.698368 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.893914 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 02 11:55:43 crc kubenswrapper[4721]: I1202 11:55:43.898520 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.031909 4721 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.043190 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.207712 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.277514 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.311099 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.317314 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.367373 4721 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.371273 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.371327 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.375877 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.391522 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=15.391502851 podStartE2EDuration="15.391502851s" podCreationTimestamp="2025-12-02 11:55:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:55:44.388884512 +0000 UTC m=+274.127434881" watchObservedRunningTime="2025-12-02 11:55:44.391502851 +0000 UTC m=+274.130053200" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.423731 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.460879 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.652807 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.670595 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.677227 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.707125 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.771627 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 02 11:55:44 crc 
kubenswrapper[4721]: I1202 11:55:44.825108 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.846124 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.855056 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.938684 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.969258 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.980710 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 02 11:55:44 crc kubenswrapper[4721]: I1202 11:55:44.987799 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.007164 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.023539 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.053865 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.082500 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.173247 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.188615 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.252860 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.339204 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.353377 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.404178 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.461159 4721 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.571069 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.579454 4721 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.624889 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.642178 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.676420 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.712216 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.713582 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.722599 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.868217 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.899687 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 02 11:55:45 crc kubenswrapper[4721]: I1202 11:55:45.929756 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.090171 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.148703 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.209891 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.248430 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.310076 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.323926 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.342447 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.369023 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.463794 4721 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.480622 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.504439 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.605101 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.732322 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.820240 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.856127 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 11:55:46 crc kubenswrapper[4721]: I1202 11:55:46.970051 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.005897 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.007778 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.011786 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.095810 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.138544 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.149588 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.227018 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.227621 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.378811 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.414670 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.498488 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.526709 4721 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.534850 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.627458 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.703735 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.757708 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.806892 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.808337 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.870034 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 02 11:55:47 crc kubenswrapper[4721]: I1202 11:55:47.905825 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.023623 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.037664 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.075520 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.119063 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.135631 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.157989 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.318808 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.332443 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.422062 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.444697 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 02 
11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.527886 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.564116 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.606816 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.620154 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.715311 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.737811 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.805491 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.816179 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.843354 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.865062 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.867034 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.936831 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.979123 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 02 11:55:48 crc kubenswrapper[4721]: I1202 11:55:48.991024 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 02 11:55:49 crc kubenswrapper[4721]: I1202 11:55:49.075180 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 02 11:55:49 crc kubenswrapper[4721]: I1202 11:55:49.078943 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 02 11:55:49 crc kubenswrapper[4721]: I1202 11:55:49.200791 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 02 11:55:49 crc kubenswrapper[4721]: I1202 11:55:49.202499 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 02 11:55:49 crc kubenswrapper[4721]: I1202 11:55:49.240247 4721 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-network-operator"/"kube-root-ca.crt" Dec 02 11:55:49 crc kubenswrapper[4721]: I1202 11:55:49.316225 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 02 11:55:49 crc kubenswrapper[4721]: I1202 11:55:49.378808 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 02 11:55:49 crc kubenswrapper[4721]: I1202 11:55:49.400821 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 02 11:55:49 crc kubenswrapper[4721]: I1202 11:55:49.485494 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 02 11:55:49 crc kubenswrapper[4721]: I1202 11:55:49.516037 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 02 11:55:49 crc kubenswrapper[4721]: I1202 11:55:49.528967 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 02 11:55:49 crc kubenswrapper[4721]: I1202 11:55:49.738426 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 02 11:55:49 crc kubenswrapper[4721]: I1202 11:55:49.840253 4721 scope.go:117] "RemoveContainer" containerID="dd5cdd7fdee5174e89b9ac2244bfcec28086ab481db70e88c4102e2b1126e2f5" Dec 02 11:55:49 crc kubenswrapper[4721]: E1202 11:55:49.840793 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 20s restarting failed container=oauth-openshift pod=oauth-openshift-bc9f7ddc4-tnr46_openshift-authentication(e73f2591-3f5f-48ba-9bb2-fc5cb5e90893)\"" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" podUID="e73f2591-3f5f-48ba-9bb2-fc5cb5e90893" Dec 02 11:55:49 crc kubenswrapper[4721]: I1202 11:55:49.944706 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 11:55:49 crc kubenswrapper[4721]: I1202 11:55:49.963742 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 02 11:55:50 crc kubenswrapper[4721]: I1202 11:55:50.023970 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 02 11:55:50 crc kubenswrapper[4721]: I1202 11:55:50.204839 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 02 11:55:50 crc kubenswrapper[4721]: I1202 11:55:50.335071 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 02 11:55:50 crc kubenswrapper[4721]: I1202 11:55:50.338196 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 02 11:55:50 crc kubenswrapper[4721]: I1202 11:55:50.343237 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 02 11:55:50 crc kubenswrapper[4721]: I1202 11:55:50.344550 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 02 11:55:50 crc kubenswrapper[4721]: I1202 11:55:50.373871 4721 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 02 11:55:50 crc kubenswrapper[4721]: I1202 11:55:50.434870 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 02 11:55:50 crc kubenswrapper[4721]: I1202 11:55:50.533109 4721 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 02 11:55:50 crc kubenswrapper[4721]: I1202 11:55:50.561901 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 02 11:55:50 crc kubenswrapper[4721]: I1202 11:55:50.563472 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 02 11:55:50 crc kubenswrapper[4721]: I1202 11:55:50.724327 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 02 11:55:50 crc kubenswrapper[4721]: I1202 11:55:50.792734 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 02 11:55:50 crc kubenswrapper[4721]: I1202 11:55:50.818371 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 11:55:50 crc kubenswrapper[4721]: I1202 11:55:50.895085 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.068464 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.139991 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.187173 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.261940 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.269444 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.336126 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.432681 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.578837 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.580148 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.597961 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 02 11:55:51 crc 
kubenswrapper[4721]: I1202 11:55:51.600797 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.640532 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.651971 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.748516 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.751908 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.814016 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.815290 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.927844 4721 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.928146 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://2b530ab7014e4ee84125b7ac0708824485e1e0a15b371160883ef2c506925042" gracePeriod=5 Dec 02 11:55:51 crc kubenswrapper[4721]: I1202 11:55:51.945765 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 02 11:55:52 crc kubenswrapper[4721]: I1202 11:55:52.005659 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 02 11:55:52 crc kubenswrapper[4721]: I1202 11:55:52.042258 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 02 11:55:52 crc kubenswrapper[4721]: I1202 11:55:52.049136 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 02 11:55:52 crc kubenswrapper[4721]: I1202 11:55:52.084728 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 02 11:55:52 crc kubenswrapper[4721]: I1202 11:55:52.205870 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 02 11:55:52 crc kubenswrapper[4721]: I1202 11:55:52.305819 4721 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 02 11:55:52 crc kubenswrapper[4721]: I1202 11:55:52.653313 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 02 11:55:52 crc kubenswrapper[4721]: I1202 11:55:52.655332 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 11:55:52 crc 
kubenswrapper[4721]: I1202 11:55:52.691603 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 02 11:55:52 crc kubenswrapper[4721]: I1202 11:55:52.828639 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 02 11:55:52 crc kubenswrapper[4721]: I1202 11:55:52.885841 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 02 11:55:52 crc kubenswrapper[4721]: I1202 11:55:52.947785 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 02 11:55:52 crc kubenswrapper[4721]: I1202 11:55:52.991925 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 02 11:55:53 crc kubenswrapper[4721]: I1202 11:55:53.035799 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 02 11:55:53 crc kubenswrapper[4721]: I1202 11:55:53.115766 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 02 11:55:53 crc kubenswrapper[4721]: I1202 11:55:53.124993 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 02 11:55:53 crc kubenswrapper[4721]: I1202 11:55:53.199309 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 02 11:55:53 crc kubenswrapper[4721]: I1202 11:55:53.221337 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 02 11:55:53 crc kubenswrapper[4721]: I1202 11:55:53.227353 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 02 11:55:53 crc kubenswrapper[4721]: I1202 11:55:53.287733 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 02 11:55:53 crc kubenswrapper[4721]: I1202 11:55:53.330812 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 02 11:55:53 crc kubenswrapper[4721]: I1202 11:55:53.406288 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 02 11:55:53 crc kubenswrapper[4721]: I1202 11:55:53.412536 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 02 11:55:53 crc kubenswrapper[4721]: I1202 11:55:53.435763 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 02 11:55:53 crc kubenswrapper[4721]: I1202 11:55:53.519608 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 02 11:55:53 crc kubenswrapper[4721]: I1202 11:55:53.671465 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 02 11:55:53 crc kubenswrapper[4721]: I1202 11:55:53.726470 4721 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 02 11:55:54 crc kubenswrapper[4721]: I1202 11:55:54.027811 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 02 11:55:54 crc kubenswrapper[4721]: I1202 11:55:54.086120 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 11:55:54 crc kubenswrapper[4721]: I1202 11:55:54.168290 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 02 11:55:54 crc kubenswrapper[4721]: I1202 11:55:54.292432 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 02 11:55:54 crc kubenswrapper[4721]: I1202 11:55:54.366128 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 02 11:55:54 crc kubenswrapper[4721]: I1202 11:55:54.393568 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 02 11:55:54 crc kubenswrapper[4721]: I1202 11:55:54.417105 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 02 11:55:54 crc kubenswrapper[4721]: I1202 11:55:54.529487 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 02 11:55:54 crc kubenswrapper[4721]: I1202 11:55:54.559672 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 02 11:55:54 crc kubenswrapper[4721]: I1202 11:55:54.744880 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 02 11:55:54 crc kubenswrapper[4721]: I1202 11:55:54.936767 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 02 11:55:54 crc kubenswrapper[4721]: I1202 11:55:54.981319 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 02 11:55:55 crc kubenswrapper[4721]: I1202 11:55:55.003933 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 02 11:55:55 crc kubenswrapper[4721]: I1202 11:55:55.074200 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 02 11:55:55 crc kubenswrapper[4721]: I1202 11:55:55.139373 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 02 11:55:55 crc kubenswrapper[4721]: I1202 11:55:55.352309 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 02 11:55:55 crc kubenswrapper[4721]: I1202 11:55:55.416782 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 11:55:55 crc kubenswrapper[4721]: I1202 11:55:55.468412 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 02 11:55:55 crc kubenswrapper[4721]: I1202 11:55:55.571697 4721 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 02 11:55:55 crc kubenswrapper[4721]: I1202 11:55:55.803263 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 02 11:55:56 crc kubenswrapper[4721]: I1202 11:55:56.038642 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.526500 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.526886 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.663167 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.663241 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.663306 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.663384 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.663422 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.663425 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.663448 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.663488 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.663569 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.663800 4721 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.663824 4721 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.663841 4721 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.663857 4721 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.673329 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.712620 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.712667 4721 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="2b530ab7014e4ee84125b7ac0708824485e1e0a15b371160883ef2c506925042" exitCode=137 Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.712708 4721 scope.go:117] "RemoveContainer" containerID="2b530ab7014e4ee84125b7ac0708824485e1e0a15b371160883ef2c506925042" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.712743 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.736463 4721 scope.go:117] "RemoveContainer" containerID="2b530ab7014e4ee84125b7ac0708824485e1e0a15b371160883ef2c506925042" Dec 02 11:55:57 crc kubenswrapper[4721]: E1202 11:55:57.737027 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b530ab7014e4ee84125b7ac0708824485e1e0a15b371160883ef2c506925042\": container with ID starting with 2b530ab7014e4ee84125b7ac0708824485e1e0a15b371160883ef2c506925042 not found: ID does not exist" containerID="2b530ab7014e4ee84125b7ac0708824485e1e0a15b371160883ef2c506925042" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.737073 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b530ab7014e4ee84125b7ac0708824485e1e0a15b371160883ef2c506925042"} err="failed to get container status \"2b530ab7014e4ee84125b7ac0708824485e1e0a15b371160883ef2c506925042\": rpc error: code = NotFound desc = could not find container \"2b530ab7014e4ee84125b7ac0708824485e1e0a15b371160883ef2c506925042\": container with ID starting with 2b530ab7014e4ee84125b7ac0708824485e1e0a15b371160883ef2c506925042 not found: ID does not exist" Dec 02 11:55:57 crc kubenswrapper[4721]: I1202 11:55:57.765027 4721 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 02 11:55:58 crc kubenswrapper[4721]: I1202 11:55:58.852197 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 02 11:56:03 crc kubenswrapper[4721]: I1202 11:56:03.959178 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 02 11:56:04 crc kubenswrapper[4721]: I1202 11:56:04.839765 4721 scope.go:117] "RemoveContainer" containerID="dd5cdd7fdee5174e89b9ac2244bfcec28086ab481db70e88c4102e2b1126e2f5" Dec 02 11:56:05 crc kubenswrapper[4721]: I1202 11:56:05.762475 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-bc9f7ddc4-tnr46_e73f2591-3f5f-48ba-9bb2-fc5cb5e90893/oauth-openshift/2.log" Dec 02 11:56:05 crc kubenswrapper[4721]: I1202 11:56:05.762825 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" event={"ID":"e73f2591-3f5f-48ba-9bb2-fc5cb5e90893","Type":"ContainerStarted","Data":"0cf27004920a66b40a1de6632ae5ef339a850dc999ee37c14cf37abd9eff3d53"} Dec 02 11:56:05 crc kubenswrapper[4721]: I1202 11:56:05.763292 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:56:05 crc kubenswrapper[4721]: I1202 11:56:05.769474 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" Dec 02 11:56:05 crc kubenswrapper[4721]: I1202 11:56:05.782125 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-bc9f7ddc4-tnr46" podStartSLOduration=91.782106394 podStartE2EDuration="1m31.782106394s" podCreationTimestamp="2025-12-02 11:54:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:56:05.778825326 +0000 UTC m=+295.517375685" watchObservedRunningTime="2025-12-02 11:56:05.782106394 +0000 UTC m=+295.520656743" Dec 02 11:56:09 crc kubenswrapper[4721]: I1202 11:56:09.791848 4721 generic.go:334] "Generic (PLEG): container finished" podID="5754f3db-279a-4340-9354-8709b29e0398" containerID="b137ec3317f50b7b548136e99fcada74e65bb27ef32b783ecb48d77844237afa" exitCode=0 Dec 02 11:56:09 crc kubenswrapper[4721]: I1202 11:56:09.792047 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" event={"ID":"5754f3db-279a-4340-9354-8709b29e0398","Type":"ContainerDied","Data":"b137ec3317f50b7b548136e99fcada74e65bb27ef32b783ecb48d77844237afa"} Dec 02 11:56:09 crc kubenswrapper[4721]: I1202 11:56:09.792987 4721 scope.go:117] "RemoveContainer" containerID="b137ec3317f50b7b548136e99fcada74e65bb27ef32b783ecb48d77844237afa" Dec 02 11:56:10 crc kubenswrapper[4721]: I1202 11:56:10.693302 4721 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 02 11:56:10 crc kubenswrapper[4721]: I1202 11:56:10.799956 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" event={"ID":"5754f3db-279a-4340-9354-8709b29e0398","Type":"ContainerStarted","Data":"d864e8ad83784da4bf5b25b3ac2f40fa762c2a9febf27227735c0a5ec525d0a0"} Dec 02 11:56:10 crc kubenswrapper[4721]: I1202 11:56:10.800313 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" Dec 02 11:56:10 crc kubenswrapper[4721]: I1202 11:56:10.803774 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" Dec 02 11:56:15 crc kubenswrapper[4721]: I1202 11:56:15.356509 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 02 11:56:21 crc kubenswrapper[4721]: I1202 11:56:21.442025 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.158960 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-mxs2d"] Dec 02 11:57:13 crc kubenswrapper[4721]: E1202 11:57:13.159635 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.159651 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 02 11:57:13 crc kubenswrapper[4721]: E1202 11:57:13.159666 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="855a31ab-b04f-4907-be30-115efd42a84e" containerName="installer" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.159674 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="855a31ab-b04f-4907-be30-115efd42a84e" containerName="installer" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.159802 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.159817 4721 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="855a31ab-b04f-4907-be30-115efd42a84e" containerName="installer" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.160241 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.178530 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-mxs2d"] Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.267735 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/aacf968d-3537-46f0-8b3c-71c69ab07644-ca-trust-extracted\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.267777 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/aacf968d-3537-46f0-8b3c-71c69ab07644-registry-tls\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.267793 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjsxl\" (UniqueName: \"kubernetes.io/projected/aacf968d-3537-46f0-8b3c-71c69ab07644-kube-api-access-rjsxl\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.267810 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/aacf968d-3537-46f0-8b3c-71c69ab07644-registry-certificates\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.267905 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.268025 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aacf968d-3537-46f0-8b3c-71c69ab07644-trusted-ca\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.268092 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/aacf968d-3537-46f0-8b3c-71c69ab07644-installation-pull-secrets\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc 
kubenswrapper[4721]: I1202 11:57:13.268129 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aacf968d-3537-46f0-8b3c-71c69ab07644-bound-sa-token\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.285779 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.369292 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aacf968d-3537-46f0-8b3c-71c69ab07644-trusted-ca\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.369378 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/aacf968d-3537-46f0-8b3c-71c69ab07644-installation-pull-secrets\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.369415 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aacf968d-3537-46f0-8b3c-71c69ab07644-bound-sa-token\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.369461 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/aacf968d-3537-46f0-8b3c-71c69ab07644-ca-trust-extracted\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.369482 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/aacf968d-3537-46f0-8b3c-71c69ab07644-registry-tls\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.369506 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/aacf968d-3537-46f0-8b3c-71c69ab07644-registry-certificates\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.369530 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjsxl\" (UniqueName: 
\"kubernetes.io/projected/aacf968d-3537-46f0-8b3c-71c69ab07644-kube-api-access-rjsxl\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.370657 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/aacf968d-3537-46f0-8b3c-71c69ab07644-registry-certificates\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.370942 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aacf968d-3537-46f0-8b3c-71c69ab07644-trusted-ca\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.371075 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/aacf968d-3537-46f0-8b3c-71c69ab07644-ca-trust-extracted\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.378849 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/aacf968d-3537-46f0-8b3c-71c69ab07644-installation-pull-secrets\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.379233 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/aacf968d-3537-46f0-8b3c-71c69ab07644-registry-tls\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.387683 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aacf968d-3537-46f0-8b3c-71c69ab07644-bound-sa-token\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.388101 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjsxl\" (UniqueName: \"kubernetes.io/projected/aacf968d-3537-46f0-8b3c-71c69ab07644-kube-api-access-rjsxl\") pod \"image-registry-66df7c8f76-mxs2d\" (UID: \"aacf968d-3537-46f0-8b3c-71c69ab07644\") " pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.476483 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:13 crc kubenswrapper[4721]: I1202 11:57:13.899144 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-mxs2d"] Dec 02 11:57:14 crc kubenswrapper[4721]: I1202 11:57:14.187790 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" event={"ID":"aacf968d-3537-46f0-8b3c-71c69ab07644","Type":"ContainerStarted","Data":"d212976ece6ef37367d15244f1b1c74caa63c449a5e7ac1ef3d38038a34a4e42"} Dec 02 11:57:14 crc kubenswrapper[4721]: I1202 11:57:14.187968 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" event={"ID":"aacf968d-3537-46f0-8b3c-71c69ab07644","Type":"ContainerStarted","Data":"466724ae44fa223d2ce29f48a62759a289f45ca6d0c691318897b2cca6fc2cc1"} Dec 02 11:57:14 crc kubenswrapper[4721]: I1202 11:57:14.188002 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:14 crc kubenswrapper[4721]: I1202 11:57:14.204569 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" podStartSLOduration=1.204553239 podStartE2EDuration="1.204553239s" podCreationTimestamp="2025-12-02 11:57:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:57:14.201963112 +0000 UTC m=+363.940513461" watchObservedRunningTime="2025-12-02 11:57:14.204553239 +0000 UTC m=+363.943103588" Dec 02 11:57:25 crc kubenswrapper[4721]: I1202 11:57:25.702837 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 11:57:25 crc kubenswrapper[4721]: I1202 11:57:25.703526 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 11:57:33 crc kubenswrapper[4721]: I1202 11:57:33.489526 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-mxs2d" Dec 02 11:57:33 crc kubenswrapper[4721]: I1202 11:57:33.555626 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tkr2s"] Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.555644 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8fw86"] Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.558230 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8fw86" podUID="94f45fa8-3517-4376-b5c0-8d6cf0254027" containerName="registry-server" containerID="cri-o://477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e" gracePeriod=30 Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.568806 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/community-operators-4k25r"] Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.569236 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4k25r" podUID="f1e35ec2-7693-4103-97e1-3304b8ac93f4" containerName="registry-server" containerID="cri-o://f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833" gracePeriod=30 Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.598304 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gtb8k"] Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.598552 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" podUID="5754f3db-279a-4340-9354-8709b29e0398" containerName="marketplace-operator" containerID="cri-o://d864e8ad83784da4bf5b25b3ac2f40fa762c2a9febf27227735c0a5ec525d0a0" gracePeriod=30 Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.605403 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4qzrw"] Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.605641 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4qzrw" podUID="5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44" containerName="registry-server" containerID="cri-o://fcf291d976dcf2bfb09e3035042d0f980b0d6761303b0297455c70a6e8966794" gracePeriod=30 Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.616542 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-24l7f"] Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.616997 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-24l7f" podUID="9a704275-5d23-4f38-805d-e0f8be9b50dc" containerName="registry-server" containerID="cri-o://2bd09f7eacf82d6f397f3ed03f094c38f9a4881e9feef857912e824ba14450f0" gracePeriod=30 Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.643823 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w6v9r"] Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.645179 4721 util.go:30] "No sandbox for pod can be found. 
Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.669420 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w6v9r"]
Dec 02 11:57:51 crc kubenswrapper[4721]: E1202 11:57:51.671381 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e is running failed: container process not found" containerID="477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e" cmd=["grpc_health_probe","-addr=:50051"]
Dec 02 11:57:51 crc kubenswrapper[4721]: E1202 11:57:51.671818 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e is running failed: container process not found" containerID="477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e" cmd=["grpc_health_probe","-addr=:50051"]
Dec 02 11:57:51 crc kubenswrapper[4721]: E1202 11:57:51.674109 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e is running failed: container process not found" containerID="477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e" cmd=["grpc_health_probe","-addr=:50051"]
Dec 02 11:57:51 crc kubenswrapper[4721]: E1202 11:57:51.674280 4721 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-8fw86" podUID="94f45fa8-3517-4376-b5c0-8d6cf0254027" containerName="registry-server"
Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.682627 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdws7\" (UniqueName: \"kubernetes.io/projected/9b2d6ebc-35e8-4617-ab74-d40b1a19d84e-kube-api-access-zdws7\") pod \"marketplace-operator-79b997595-w6v9r\" (UID: \"9b2d6ebc-35e8-4617-ab74-d40b1a19d84e\") " pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r"
Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.682685 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9b2d6ebc-35e8-4617-ab74-d40b1a19d84e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w6v9r\" (UID: \"9b2d6ebc-35e8-4617-ab74-d40b1a19d84e\") " pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r"
Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.682712 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9b2d6ebc-35e8-4617-ab74-d40b1a19d84e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w6v9r\" (UID: \"9b2d6ebc-35e8-4617-ab74-d40b1a19d84e\") " pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r"
Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.784117 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdws7\" (UniqueName: \"kubernetes.io/projected/9b2d6ebc-35e8-4617-ab74-d40b1a19d84e-kube-api-access-zdws7\") pod \"marketplace-operator-79b997595-w6v9r\" (UID: \"9b2d6ebc-35e8-4617-ab74-d40b1a19d84e\") " pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r"
"operationExecutor.MountVolume started for volume \"kube-api-access-zdws7\" (UniqueName: \"kubernetes.io/projected/9b2d6ebc-35e8-4617-ab74-d40b1a19d84e-kube-api-access-zdws7\") pod \"marketplace-operator-79b997595-w6v9r\" (UID: \"9b2d6ebc-35e8-4617-ab74-d40b1a19d84e\") " pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r" Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.784250 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9b2d6ebc-35e8-4617-ab74-d40b1a19d84e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w6v9r\" (UID: \"9b2d6ebc-35e8-4617-ab74-d40b1a19d84e\") " pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r" Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.784279 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9b2d6ebc-35e8-4617-ab74-d40b1a19d84e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w6v9r\" (UID: \"9b2d6ebc-35e8-4617-ab74-d40b1a19d84e\") " pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r" Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.785720 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9b2d6ebc-35e8-4617-ab74-d40b1a19d84e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w6v9r\" (UID: \"9b2d6ebc-35e8-4617-ab74-d40b1a19d84e\") " pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r" Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.793364 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9b2d6ebc-35e8-4617-ab74-d40b1a19d84e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w6v9r\" (UID: \"9b2d6ebc-35e8-4617-ab74-d40b1a19d84e\") " pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r" Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.803137 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdws7\" (UniqueName: \"kubernetes.io/projected/9b2d6ebc-35e8-4617-ab74-d40b1a19d84e-kube-api-access-zdws7\") pod \"marketplace-operator-79b997595-w6v9r\" (UID: \"9b2d6ebc-35e8-4617-ab74-d40b1a19d84e\") " pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r" Dec 02 11:57:51 crc kubenswrapper[4721]: E1202 11:57:51.876511 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833 is running failed: container process not found" containerID="f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 11:57:51 crc kubenswrapper[4721]: E1202 11:57:51.876844 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833 is running failed: container process not found" containerID="f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 11:57:51 crc kubenswrapper[4721]: E1202 11:57:51.877301 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = 
Dec 02 11:57:51 crc kubenswrapper[4721]: E1202 11:57:51.877378 4721 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-4k25r" podUID="f1e35ec2-7693-4103-97e1-3304b8ac93f4" containerName="registry-server"
Dec 02 11:57:51 crc kubenswrapper[4721]: I1202 11:57:51.971766 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.032465 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8fw86"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.033621 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.033896 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4k25r"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.060128 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4qzrw"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.080971 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-24l7f"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.191414 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94f45fa8-3517-4376-b5c0-8d6cf0254027-catalog-content\") pod \"94f45fa8-3517-4376-b5c0-8d6cf0254027\" (UID: \"94f45fa8-3517-4376-b5c0-8d6cf0254027\") "
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.191494 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dfxx\" (UniqueName: \"kubernetes.io/projected/5754f3db-279a-4340-9354-8709b29e0398-kube-api-access-6dfxx\") pod \"5754f3db-279a-4340-9354-8709b29e0398\" (UID: \"5754f3db-279a-4340-9354-8709b29e0398\") "
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.191514 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5754f3db-279a-4340-9354-8709b29e0398-marketplace-trusted-ca\") pod \"5754f3db-279a-4340-9354-8709b29e0398\" (UID: \"5754f3db-279a-4340-9354-8709b29e0398\") "
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.191563 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-catalog-content\") pod \"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44\" (UID: \"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44\") "
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.191614 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a704275-5d23-4f38-805d-e0f8be9b50dc-utilities\") pod \"9a704275-5d23-4f38-805d-e0f8be9b50dc\" (UID: \"9a704275-5d23-4f38-805d-e0f8be9b50dc\") "
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.191633 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a704275-5d23-4f38-805d-e0f8be9b50dc-catalog-content\") pod \"9a704275-5d23-4f38-805d-e0f8be9b50dc\" (UID: \"9a704275-5d23-4f38-805d-e0f8be9b50dc\") "
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.191654 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhs9j\" (UniqueName: \"kubernetes.io/projected/9a704275-5d23-4f38-805d-e0f8be9b50dc-kube-api-access-lhs9j\") pod \"9a704275-5d23-4f38-805d-e0f8be9b50dc\" (UID: \"9a704275-5d23-4f38-805d-e0f8be9b50dc\") "
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.191675 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94f45fa8-3517-4376-b5c0-8d6cf0254027-utilities\") pod \"94f45fa8-3517-4376-b5c0-8d6cf0254027\" (UID: \"94f45fa8-3517-4376-b5c0-8d6cf0254027\") "
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.191713 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5754f3db-279a-4340-9354-8709b29e0398-marketplace-operator-metrics\") pod \"5754f3db-279a-4340-9354-8709b29e0398\" (UID: \"5754f3db-279a-4340-9354-8709b29e0398\") "
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.191734 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpkpf\" (UniqueName: \"kubernetes.io/projected/94f45fa8-3517-4376-b5c0-8d6cf0254027-kube-api-access-zpkpf\") pod \"94f45fa8-3517-4376-b5c0-8d6cf0254027\" (UID: \"94f45fa8-3517-4376-b5c0-8d6cf0254027\") "
\"kubernetes.io/projected/94f45fa8-3517-4376-b5c0-8d6cf0254027-kube-api-access-zpkpf\") pod \"94f45fa8-3517-4376-b5c0-8d6cf0254027\" (UID: \"94f45fa8-3517-4376-b5c0-8d6cf0254027\") " Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.191764 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1e35ec2-7693-4103-97e1-3304b8ac93f4-utilities\") pod \"f1e35ec2-7693-4103-97e1-3304b8ac93f4\" (UID: \"f1e35ec2-7693-4103-97e1-3304b8ac93f4\") " Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.191789 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-utilities\") pod \"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44\" (UID: \"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44\") " Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.191812 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1e35ec2-7693-4103-97e1-3304b8ac93f4-catalog-content\") pod \"f1e35ec2-7693-4103-97e1-3304b8ac93f4\" (UID: \"f1e35ec2-7693-4103-97e1-3304b8ac93f4\") " Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.191854 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bglfp\" (UniqueName: \"kubernetes.io/projected/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-kube-api-access-bglfp\") pod \"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44\" (UID: \"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44\") " Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.191876 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpxgx\" (UniqueName: \"kubernetes.io/projected/f1e35ec2-7693-4103-97e1-3304b8ac93f4-kube-api-access-bpxgx\") pod \"f1e35ec2-7693-4103-97e1-3304b8ac93f4\" (UID: \"f1e35ec2-7693-4103-97e1-3304b8ac93f4\") " Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.192290 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5754f3db-279a-4340-9354-8709b29e0398-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "5754f3db-279a-4340-9354-8709b29e0398" (UID: "5754f3db-279a-4340-9354-8709b29e0398"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.192354 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a704275-5d23-4f38-805d-e0f8be9b50dc-utilities" (OuterVolumeSpecName: "utilities") pod "9a704275-5d23-4f38-805d-e0f8be9b50dc" (UID: "9a704275-5d23-4f38-805d-e0f8be9b50dc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.192523 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94f45fa8-3517-4376-b5c0-8d6cf0254027-utilities" (OuterVolumeSpecName: "utilities") pod "94f45fa8-3517-4376-b5c0-8d6cf0254027" (UID: "94f45fa8-3517-4376-b5c0-8d6cf0254027"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.192894 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1e35ec2-7693-4103-97e1-3304b8ac93f4-utilities" (OuterVolumeSpecName: "utilities") pod "f1e35ec2-7693-4103-97e1-3304b8ac93f4" (UID: "f1e35ec2-7693-4103-97e1-3304b8ac93f4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.193615 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-utilities" (OuterVolumeSpecName: "utilities") pod "5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44" (UID: "5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.208958 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a704275-5d23-4f38-805d-e0f8be9b50dc-kube-api-access-lhs9j" (OuterVolumeSpecName: "kube-api-access-lhs9j") pod "9a704275-5d23-4f38-805d-e0f8be9b50dc" (UID: "9a704275-5d23-4f38-805d-e0f8be9b50dc"). InnerVolumeSpecName "kube-api-access-lhs9j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.208987 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1e35ec2-7693-4103-97e1-3304b8ac93f4-kube-api-access-bpxgx" (OuterVolumeSpecName: "kube-api-access-bpxgx") pod "f1e35ec2-7693-4103-97e1-3304b8ac93f4" (UID: "f1e35ec2-7693-4103-97e1-3304b8ac93f4"). InnerVolumeSpecName "kube-api-access-bpxgx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.209018 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-kube-api-access-bglfp" (OuterVolumeSpecName: "kube-api-access-bglfp") pod "5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44" (UID: "5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44"). InnerVolumeSpecName "kube-api-access-bglfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.209529 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5754f3db-279a-4340-9354-8709b29e0398-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "5754f3db-279a-4340-9354-8709b29e0398" (UID: "5754f3db-279a-4340-9354-8709b29e0398"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.209736 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5754f3db-279a-4340-9354-8709b29e0398-kube-api-access-6dfxx" (OuterVolumeSpecName: "kube-api-access-6dfxx") pod "5754f3db-279a-4340-9354-8709b29e0398" (UID: "5754f3db-279a-4340-9354-8709b29e0398"). InnerVolumeSpecName "kube-api-access-6dfxx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.210799 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94f45fa8-3517-4376-b5c0-8d6cf0254027-kube-api-access-zpkpf" (OuterVolumeSpecName: "kube-api-access-zpkpf") pod "94f45fa8-3517-4376-b5c0-8d6cf0254027" (UID: "94f45fa8-3517-4376-b5c0-8d6cf0254027"). InnerVolumeSpecName "kube-api-access-zpkpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.215539 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44" (UID: "5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.252477 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94f45fa8-3517-4376-b5c0-8d6cf0254027-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "94f45fa8-3517-4376-b5c0-8d6cf0254027" (UID: "94f45fa8-3517-4376-b5c0-8d6cf0254027"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.267290 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1e35ec2-7693-4103-97e1-3304b8ac93f4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f1e35ec2-7693-4103-97e1-3304b8ac93f4" (UID: "f1e35ec2-7693-4103-97e1-3304b8ac93f4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.293165 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bglfp\" (UniqueName: \"kubernetes.io/projected/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-kube-api-access-bglfp\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.293197 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpxgx\" (UniqueName: \"kubernetes.io/projected/f1e35ec2-7693-4103-97e1-3304b8ac93f4-kube-api-access-bpxgx\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.293206 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94f45fa8-3517-4376-b5c0-8d6cf0254027-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.293215 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dfxx\" (UniqueName: \"kubernetes.io/projected/5754f3db-279a-4340-9354-8709b29e0398-kube-api-access-6dfxx\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.293223 4721 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5754f3db-279a-4340-9354-8709b29e0398-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.293232 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.293242 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a704275-5d23-4f38-805d-e0f8be9b50dc-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.293253 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhs9j\" (UniqueName: \"kubernetes.io/projected/9a704275-5d23-4f38-805d-e0f8be9b50dc-kube-api-access-lhs9j\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.293262 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94f45fa8-3517-4376-b5c0-8d6cf0254027-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.293271 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpkpf\" (UniqueName: \"kubernetes.io/projected/94f45fa8-3517-4376-b5c0-8d6cf0254027-kube-api-access-zpkpf\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.293279 4721 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5754f3db-279a-4340-9354-8709b29e0398-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.293289 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1e35ec2-7693-4103-97e1-3304b8ac93f4-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.293297 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.293304 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1e35ec2-7693-4103-97e1-3304b8ac93f4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.310112 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a704275-5d23-4f38-805d-e0f8be9b50dc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9a704275-5d23-4f38-805d-e0f8be9b50dc" (UID: "9a704275-5d23-4f38-805d-e0f8be9b50dc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.362361 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w6v9r"] Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.395025 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a704275-5d23-4f38-805d-e0f8be9b50dc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.396948 4721 generic.go:334] "Generic (PLEG): container finished" podID="5754f3db-279a-4340-9354-8709b29e0398" containerID="d864e8ad83784da4bf5b25b3ac2f40fa762c2a9febf27227735c0a5ec525d0a0" exitCode=0 Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.397005 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" event={"ID":"5754f3db-279a-4340-9354-8709b29e0398","Type":"ContainerDied","Data":"d864e8ad83784da4bf5b25b3ac2f40fa762c2a9febf27227735c0a5ec525d0a0"} Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.397044 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gtb8k" event={"ID":"5754f3db-279a-4340-9354-8709b29e0398","Type":"ContainerDied","Data":"49bd301eff21eb1089700a1612d7e21a5e2bbcb754b662ade99b5440d8bdf391"} Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.397061 4721 scope.go:117] "RemoveContainer" containerID="d864e8ad83784da4bf5b25b3ac2f40fa762c2a9febf27227735c0a5ec525d0a0" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.397157 4721 util.go:48] "No ready sandbox for pod can be found. 
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.403975 4721 generic.go:334] "Generic (PLEG): container finished" podID="94f45fa8-3517-4376-b5c0-8d6cf0254027" containerID="477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e" exitCode=0
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.404019 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8fw86" event={"ID":"94f45fa8-3517-4376-b5c0-8d6cf0254027","Type":"ContainerDied","Data":"477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e"}
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.404039 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8fw86" event={"ID":"94f45fa8-3517-4376-b5c0-8d6cf0254027","Type":"ContainerDied","Data":"9527b67e8ce16ec0f2374fd19b42412a7f71c8cac49e9cd53a70f092b4f68df9"}
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.404089 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8fw86"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.407766 4721 generic.go:334] "Generic (PLEG): container finished" podID="5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44" containerID="fcf291d976dcf2bfb09e3035042d0f980b0d6761303b0297455c70a6e8966794" exitCode=0
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.408270 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qzrw" event={"ID":"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44","Type":"ContainerDied","Data":"fcf291d976dcf2bfb09e3035042d0f980b0d6761303b0297455c70a6e8966794"}
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.408306 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qzrw" event={"ID":"5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44","Type":"ContainerDied","Data":"a810b1da32c93e24800404009f123c028496c160379f847aa2b1afacfebc5ea8"}
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.408460 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4qzrw"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.415234 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r" event={"ID":"9b2d6ebc-35e8-4617-ab74-d40b1a19d84e","Type":"ContainerStarted","Data":"1c4b8f8bd6636828c00bbf39d373b9ee075f3d43752845f544d24b2bd31930a8"}
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.421080 4721 generic.go:334] "Generic (PLEG): container finished" podID="9a704275-5d23-4f38-805d-e0f8be9b50dc" containerID="2bd09f7eacf82d6f397f3ed03f094c38f9a4881e9feef857912e824ba14450f0" exitCode=0
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.421166 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-24l7f" event={"ID":"9a704275-5d23-4f38-805d-e0f8be9b50dc","Type":"ContainerDied","Data":"2bd09f7eacf82d6f397f3ed03f094c38f9a4881e9feef857912e824ba14450f0"}
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.421193 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-24l7f" event={"ID":"9a704275-5d23-4f38-805d-e0f8be9b50dc","Type":"ContainerDied","Data":"ecf656ad151efaf6ea41d416053f0f00783a442c1113e1e0a81317d7c4bfdc75"}
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.421298 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-24l7f"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.426210 4721 scope.go:117] "RemoveContainer" containerID="b137ec3317f50b7b548136e99fcada74e65bb27ef32b783ecb48d77844237afa"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.435138 4721 generic.go:334] "Generic (PLEG): container finished" podID="f1e35ec2-7693-4103-97e1-3304b8ac93f4" containerID="f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833" exitCode=0
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.435197 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4k25r" event={"ID":"f1e35ec2-7693-4103-97e1-3304b8ac93f4","Type":"ContainerDied","Data":"f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833"}
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.435225 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4k25r" event={"ID":"f1e35ec2-7693-4103-97e1-3304b8ac93f4","Type":"ContainerDied","Data":"e0b37a46efff5578c5ff68c94440f0fca3df8578b4b16dd377101bdf5664c3a1"}
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.435393 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4k25r"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.440945 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gtb8k"]
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.450655 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gtb8k"]
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.455682 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8fw86"]
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.459904 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8fw86"]
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.471775 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4qzrw"]
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.473458 4721 scope.go:117] "RemoveContainer" containerID="d864e8ad83784da4bf5b25b3ac2f40fa762c2a9febf27227735c0a5ec525d0a0"
Dec 02 11:57:52 crc kubenswrapper[4721]: E1202 11:57:52.475464 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d864e8ad83784da4bf5b25b3ac2f40fa762c2a9febf27227735c0a5ec525d0a0\": container with ID starting with d864e8ad83784da4bf5b25b3ac2f40fa762c2a9febf27227735c0a5ec525d0a0 not found: ID does not exist" containerID="d864e8ad83784da4bf5b25b3ac2f40fa762c2a9febf27227735c0a5ec525d0a0"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.475507 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d864e8ad83784da4bf5b25b3ac2f40fa762c2a9febf27227735c0a5ec525d0a0"} err="failed to get container status \"d864e8ad83784da4bf5b25b3ac2f40fa762c2a9febf27227735c0a5ec525d0a0\": rpc error: code = NotFound desc = could not find container \"d864e8ad83784da4bf5b25b3ac2f40fa762c2a9febf27227735c0a5ec525d0a0\": container with ID starting with d864e8ad83784da4bf5b25b3ac2f40fa762c2a9febf27227735c0a5ec525d0a0 not found: ID does not exist"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.475536 4721 scope.go:117] "RemoveContainer" containerID="b137ec3317f50b7b548136e99fcada74e65bb27ef32b783ecb48d77844237afa"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.479860 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4qzrw"]
Dec 02 11:57:52 crc kubenswrapper[4721]: E1202 11:57:52.480519 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b137ec3317f50b7b548136e99fcada74e65bb27ef32b783ecb48d77844237afa\": container with ID starting with b137ec3317f50b7b548136e99fcada74e65bb27ef32b783ecb48d77844237afa not found: ID does not exist" containerID="b137ec3317f50b7b548136e99fcada74e65bb27ef32b783ecb48d77844237afa"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.480653 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b137ec3317f50b7b548136e99fcada74e65bb27ef32b783ecb48d77844237afa"} err="failed to get container status \"b137ec3317f50b7b548136e99fcada74e65bb27ef32b783ecb48d77844237afa\": rpc error: code = NotFound desc = could not find container \"b137ec3317f50b7b548136e99fcada74e65bb27ef32b783ecb48d77844237afa\": container with ID starting with b137ec3317f50b7b548136e99fcada74e65bb27ef32b783ecb48d77844237afa not found: ID does not exist"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.480741 4721 scope.go:117] "RemoveContainer" containerID="477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.490863 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-24l7f"]
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.495007 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-24l7f"]
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.497535 4721 scope.go:117] "RemoveContainer" containerID="54df233cac5ad696657cec8b16bf2d2e5377cbd8679c756cbda09974a469ec7b"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.497756 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4k25r"]
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.500452 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4k25r"]
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.509563 4721 scope.go:117] "RemoveContainer" containerID="828be2538b4c67ba1f07a333f9c900413a2f3b06829984905a48580bee6b2531"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.523894 4721 scope.go:117] "RemoveContainer" containerID="477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e"
Dec 02 11:57:52 crc kubenswrapper[4721]: E1202 11:57:52.524231 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e\": container with ID starting with 477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e not found: ID does not exist" containerID="477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.524356 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e"} err="failed to get container status \"477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e\": rpc error: code = NotFound desc = could not find container \"477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e\": container with ID starting with 477cba68f14762f95f51d20ad48f30ae54fb8041127c7c3e3c602b5494104e9e not found: ID does not exist"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.524445 4721 scope.go:117] "RemoveContainer" containerID="54df233cac5ad696657cec8b16bf2d2e5377cbd8679c756cbda09974a469ec7b"
Dec 02 11:57:52 crc kubenswrapper[4721]: E1202 11:57:52.524787 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54df233cac5ad696657cec8b16bf2d2e5377cbd8679c756cbda09974a469ec7b\": container with ID starting with 54df233cac5ad696657cec8b16bf2d2e5377cbd8679c756cbda09974a469ec7b not found: ID does not exist" containerID="54df233cac5ad696657cec8b16bf2d2e5377cbd8679c756cbda09974a469ec7b"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.524820 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54df233cac5ad696657cec8b16bf2d2e5377cbd8679c756cbda09974a469ec7b"} err="failed to get container status \"54df233cac5ad696657cec8b16bf2d2e5377cbd8679c756cbda09974a469ec7b\": rpc error: code = NotFound desc = could not find container \"54df233cac5ad696657cec8b16bf2d2e5377cbd8679c756cbda09974a469ec7b\": container with ID starting with 54df233cac5ad696657cec8b16bf2d2e5377cbd8679c756cbda09974a469ec7b not found: ID does not exist"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.524841 4721 scope.go:117] "RemoveContainer" containerID="828be2538b4c67ba1f07a333f9c900413a2f3b06829984905a48580bee6b2531"
Dec 02 11:57:52 crc kubenswrapper[4721]: E1202 11:57:52.525188 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"828be2538b4c67ba1f07a333f9c900413a2f3b06829984905a48580bee6b2531\": container with ID starting with 828be2538b4c67ba1f07a333f9c900413a2f3b06829984905a48580bee6b2531 not found: ID does not exist" containerID="828be2538b4c67ba1f07a333f9c900413a2f3b06829984905a48580bee6b2531"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.525208 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"828be2538b4c67ba1f07a333f9c900413a2f3b06829984905a48580bee6b2531"} err="failed to get container status \"828be2538b4c67ba1f07a333f9c900413a2f3b06829984905a48580bee6b2531\": rpc error: code = NotFound desc = could not find container \"828be2538b4c67ba1f07a333f9c900413a2f3b06829984905a48580bee6b2531\": container with ID starting with 828be2538b4c67ba1f07a333f9c900413a2f3b06829984905a48580bee6b2531 not found: ID does not exist"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.525222 4721 scope.go:117] "RemoveContainer" containerID="fcf291d976dcf2bfb09e3035042d0f980b0d6761303b0297455c70a6e8966794"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.541100 4721 scope.go:117] "RemoveContainer" containerID="1a0341d448b0dcdedbd73e4b08e0c1023e2b0e0358acbe02b144494022be580d"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.553972 4721 scope.go:117] "RemoveContainer" containerID="cee41dccd15454a4afaeca3f0862d4d9f04fb390e662e1e73e1cce7a460df046"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.566241 4721 scope.go:117] "RemoveContainer" containerID="fcf291d976dcf2bfb09e3035042d0f980b0d6761303b0297455c70a6e8966794"
Dec 02 11:57:52 crc kubenswrapper[4721]: E1202 11:57:52.566607 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcf291d976dcf2bfb09e3035042d0f980b0d6761303b0297455c70a6e8966794\": container with ID starting with fcf291d976dcf2bfb09e3035042d0f980b0d6761303b0297455c70a6e8966794 not found: ID does not exist" containerID="fcf291d976dcf2bfb09e3035042d0f980b0d6761303b0297455c70a6e8966794"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.566637 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcf291d976dcf2bfb09e3035042d0f980b0d6761303b0297455c70a6e8966794"} err="failed to get container status \"fcf291d976dcf2bfb09e3035042d0f980b0d6761303b0297455c70a6e8966794\": rpc error: code = NotFound desc = could not find container \"fcf291d976dcf2bfb09e3035042d0f980b0d6761303b0297455c70a6e8966794\": container with ID starting with fcf291d976dcf2bfb09e3035042d0f980b0d6761303b0297455c70a6e8966794 not found: ID does not exist"
Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.566843 4721 scope.go:117] "RemoveContainer" containerID="1a0341d448b0dcdedbd73e4b08e0c1023e2b0e0358acbe02b144494022be580d"
Dec 02 11:57:52 crc kubenswrapper[4721]: E1202 11:57:52.567078 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a0341d448b0dcdedbd73e4b08e0c1023e2b0e0358acbe02b144494022be580d\": container with ID starting with 1a0341d448b0dcdedbd73e4b08e0c1023e2b0e0358acbe02b144494022be580d not found: ID does not exist" containerID="1a0341d448b0dcdedbd73e4b08e0c1023e2b0e0358acbe02b144494022be580d"
err="rpc error: code = NotFound desc = could not find container \"1a0341d448b0dcdedbd73e4b08e0c1023e2b0e0358acbe02b144494022be580d\": container with ID starting with 1a0341d448b0dcdedbd73e4b08e0c1023e2b0e0358acbe02b144494022be580d not found: ID does not exist" containerID="1a0341d448b0dcdedbd73e4b08e0c1023e2b0e0358acbe02b144494022be580d" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.567098 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a0341d448b0dcdedbd73e4b08e0c1023e2b0e0358acbe02b144494022be580d"} err="failed to get container status \"1a0341d448b0dcdedbd73e4b08e0c1023e2b0e0358acbe02b144494022be580d\": rpc error: code = NotFound desc = could not find container \"1a0341d448b0dcdedbd73e4b08e0c1023e2b0e0358acbe02b144494022be580d\": container with ID starting with 1a0341d448b0dcdedbd73e4b08e0c1023e2b0e0358acbe02b144494022be580d not found: ID does not exist" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.567111 4721 scope.go:117] "RemoveContainer" containerID="cee41dccd15454a4afaeca3f0862d4d9f04fb390e662e1e73e1cce7a460df046" Dec 02 11:57:52 crc kubenswrapper[4721]: E1202 11:57:52.567281 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cee41dccd15454a4afaeca3f0862d4d9f04fb390e662e1e73e1cce7a460df046\": container with ID starting with cee41dccd15454a4afaeca3f0862d4d9f04fb390e662e1e73e1cce7a460df046 not found: ID does not exist" containerID="cee41dccd15454a4afaeca3f0862d4d9f04fb390e662e1e73e1cce7a460df046" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.567296 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cee41dccd15454a4afaeca3f0862d4d9f04fb390e662e1e73e1cce7a460df046"} err="failed to get container status \"cee41dccd15454a4afaeca3f0862d4d9f04fb390e662e1e73e1cce7a460df046\": rpc error: code = NotFound desc = could not find container \"cee41dccd15454a4afaeca3f0862d4d9f04fb390e662e1e73e1cce7a460df046\": container with ID starting with cee41dccd15454a4afaeca3f0862d4d9f04fb390e662e1e73e1cce7a460df046 not found: ID does not exist" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.567307 4721 scope.go:117] "RemoveContainer" containerID="2bd09f7eacf82d6f397f3ed03f094c38f9a4881e9feef857912e824ba14450f0" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.578628 4721 scope.go:117] "RemoveContainer" containerID="a7b83b3ca582c1246dfce09664809edc19a230a255bea275641edf7d02009d23" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.594492 4721 scope.go:117] "RemoveContainer" containerID="bab484bca3acb35d267b305bbbfa61f099f0a3384d39e8c587f77c621d3e7f43" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.606479 4721 scope.go:117] "RemoveContainer" containerID="2bd09f7eacf82d6f397f3ed03f094c38f9a4881e9feef857912e824ba14450f0" Dec 02 11:57:52 crc kubenswrapper[4721]: E1202 11:57:52.607112 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bd09f7eacf82d6f397f3ed03f094c38f9a4881e9feef857912e824ba14450f0\": container with ID starting with 2bd09f7eacf82d6f397f3ed03f094c38f9a4881e9feef857912e824ba14450f0 not found: ID does not exist" containerID="2bd09f7eacf82d6f397f3ed03f094c38f9a4881e9feef857912e824ba14450f0" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.607146 4721 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"2bd09f7eacf82d6f397f3ed03f094c38f9a4881e9feef857912e824ba14450f0"} err="failed to get container status \"2bd09f7eacf82d6f397f3ed03f094c38f9a4881e9feef857912e824ba14450f0\": rpc error: code = NotFound desc = could not find container \"2bd09f7eacf82d6f397f3ed03f094c38f9a4881e9feef857912e824ba14450f0\": container with ID starting with 2bd09f7eacf82d6f397f3ed03f094c38f9a4881e9feef857912e824ba14450f0 not found: ID does not exist" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.607171 4721 scope.go:117] "RemoveContainer" containerID="a7b83b3ca582c1246dfce09664809edc19a230a255bea275641edf7d02009d23" Dec 02 11:57:52 crc kubenswrapper[4721]: E1202 11:57:52.607603 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7b83b3ca582c1246dfce09664809edc19a230a255bea275641edf7d02009d23\": container with ID starting with a7b83b3ca582c1246dfce09664809edc19a230a255bea275641edf7d02009d23 not found: ID does not exist" containerID="a7b83b3ca582c1246dfce09664809edc19a230a255bea275641edf7d02009d23" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.607630 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7b83b3ca582c1246dfce09664809edc19a230a255bea275641edf7d02009d23"} err="failed to get container status \"a7b83b3ca582c1246dfce09664809edc19a230a255bea275641edf7d02009d23\": rpc error: code = NotFound desc = could not find container \"a7b83b3ca582c1246dfce09664809edc19a230a255bea275641edf7d02009d23\": container with ID starting with a7b83b3ca582c1246dfce09664809edc19a230a255bea275641edf7d02009d23 not found: ID does not exist" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.607646 4721 scope.go:117] "RemoveContainer" containerID="bab484bca3acb35d267b305bbbfa61f099f0a3384d39e8c587f77c621d3e7f43" Dec 02 11:57:52 crc kubenswrapper[4721]: E1202 11:57:52.607956 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bab484bca3acb35d267b305bbbfa61f099f0a3384d39e8c587f77c621d3e7f43\": container with ID starting with bab484bca3acb35d267b305bbbfa61f099f0a3384d39e8c587f77c621d3e7f43 not found: ID does not exist" containerID="bab484bca3acb35d267b305bbbfa61f099f0a3384d39e8c587f77c621d3e7f43" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.607987 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bab484bca3acb35d267b305bbbfa61f099f0a3384d39e8c587f77c621d3e7f43"} err="failed to get container status \"bab484bca3acb35d267b305bbbfa61f099f0a3384d39e8c587f77c621d3e7f43\": rpc error: code = NotFound desc = could not find container \"bab484bca3acb35d267b305bbbfa61f099f0a3384d39e8c587f77c621d3e7f43\": container with ID starting with bab484bca3acb35d267b305bbbfa61f099f0a3384d39e8c587f77c621d3e7f43 not found: ID does not exist" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.608004 4721 scope.go:117] "RemoveContainer" containerID="f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.619388 4721 scope.go:117] "RemoveContainer" containerID="ce97d6a116668e7e69ce778452e5b8b330d8642cf694e3969e94af7f404b34f2" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.631546 4721 scope.go:117] "RemoveContainer" containerID="085f3f87c3026e5868a3a02449b3a712fb715852b72b83c7ab4e8b67b60168ca" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.645414 4721 
scope.go:117] "RemoveContainer" containerID="f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833" Dec 02 11:57:52 crc kubenswrapper[4721]: E1202 11:57:52.645908 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833\": container with ID starting with f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833 not found: ID does not exist" containerID="f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.645944 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833"} err="failed to get container status \"f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833\": rpc error: code = NotFound desc = could not find container \"f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833\": container with ID starting with f242372eeed94c90970ae44510bb1d6efd1b66e4504ab8b24938701c3189a833 not found: ID does not exist" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.645974 4721 scope.go:117] "RemoveContainer" containerID="ce97d6a116668e7e69ce778452e5b8b330d8642cf694e3969e94af7f404b34f2" Dec 02 11:57:52 crc kubenswrapper[4721]: E1202 11:57:52.646667 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce97d6a116668e7e69ce778452e5b8b330d8642cf694e3969e94af7f404b34f2\": container with ID starting with ce97d6a116668e7e69ce778452e5b8b330d8642cf694e3969e94af7f404b34f2 not found: ID does not exist" containerID="ce97d6a116668e7e69ce778452e5b8b330d8642cf694e3969e94af7f404b34f2" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.646698 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce97d6a116668e7e69ce778452e5b8b330d8642cf694e3969e94af7f404b34f2"} err="failed to get container status \"ce97d6a116668e7e69ce778452e5b8b330d8642cf694e3969e94af7f404b34f2\": rpc error: code = NotFound desc = could not find container \"ce97d6a116668e7e69ce778452e5b8b330d8642cf694e3969e94af7f404b34f2\": container with ID starting with ce97d6a116668e7e69ce778452e5b8b330d8642cf694e3969e94af7f404b34f2 not found: ID does not exist" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.646716 4721 scope.go:117] "RemoveContainer" containerID="085f3f87c3026e5868a3a02449b3a712fb715852b72b83c7ab4e8b67b60168ca" Dec 02 11:57:52 crc kubenswrapper[4721]: E1202 11:57:52.647108 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"085f3f87c3026e5868a3a02449b3a712fb715852b72b83c7ab4e8b67b60168ca\": container with ID starting with 085f3f87c3026e5868a3a02449b3a712fb715852b72b83c7ab4e8b67b60168ca not found: ID does not exist" containerID="085f3f87c3026e5868a3a02449b3a712fb715852b72b83c7ab4e8b67b60168ca" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.647131 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"085f3f87c3026e5868a3a02449b3a712fb715852b72b83c7ab4e8b67b60168ca"} err="failed to get container status \"085f3f87c3026e5868a3a02449b3a712fb715852b72b83c7ab4e8b67b60168ca\": rpc error: code = NotFound desc = could not find container \"085f3f87c3026e5868a3a02449b3a712fb715852b72b83c7ab4e8b67b60168ca\": container with ID starting with 
085f3f87c3026e5868a3a02449b3a712fb715852b72b83c7ab4e8b67b60168ca not found: ID does not exist" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.846424 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5754f3db-279a-4340-9354-8709b29e0398" path="/var/lib/kubelet/pods/5754f3db-279a-4340-9354-8709b29e0398/volumes" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.846979 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44" path="/var/lib/kubelet/pods/5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44/volumes" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.847564 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94f45fa8-3517-4376-b5c0-8d6cf0254027" path="/var/lib/kubelet/pods/94f45fa8-3517-4376-b5c0-8d6cf0254027/volumes" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.848623 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a704275-5d23-4f38-805d-e0f8be9b50dc" path="/var/lib/kubelet/pods/9a704275-5d23-4f38-805d-e0f8be9b50dc/volumes" Dec 02 11:57:52 crc kubenswrapper[4721]: I1202 11:57:52.849324 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1e35ec2-7693-4103-97e1-3304b8ac93f4" path="/var/lib/kubelet/pods/f1e35ec2-7693-4103-97e1-3304b8ac93f4/volumes" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.447509 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r" event={"ID":"9b2d6ebc-35e8-4617-ab74-d40b1a19d84e","Type":"ContainerStarted","Data":"9afe447f84afc0319ba831b5556acf0d1efc4bac089090be05edb7117e5c2059"} Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.447971 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.454384 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.464252 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-w6v9r" podStartSLOduration=2.464234896 podStartE2EDuration="2.464234896s" podCreationTimestamp="2025-12-02 11:57:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:57:53.46041696 +0000 UTC m=+403.198967309" watchObservedRunningTime="2025-12-02 11:57:53.464234896 +0000 UTC m=+403.202785245" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.773995 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-692sp"] Dec 02 11:57:53 crc kubenswrapper[4721]: E1202 11:57:53.774165 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1e35ec2-7693-4103-97e1-3304b8ac93f4" containerName="extract-content" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774176 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1e35ec2-7693-4103-97e1-3304b8ac93f4" containerName="extract-content" Dec 02 11:57:53 crc kubenswrapper[4721]: E1202 11:57:53.774186 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44" containerName="registry-server" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774191 4721 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44" containerName="registry-server" Dec 02 11:57:53 crc kubenswrapper[4721]: E1202 11:57:53.774199 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94f45fa8-3517-4376-b5c0-8d6cf0254027" containerName="extract-content" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774204 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="94f45fa8-3517-4376-b5c0-8d6cf0254027" containerName="extract-content" Dec 02 11:57:53 crc kubenswrapper[4721]: E1202 11:57:53.774211 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1e35ec2-7693-4103-97e1-3304b8ac93f4" containerName="registry-server" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774217 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1e35ec2-7693-4103-97e1-3304b8ac93f4" containerName="registry-server" Dec 02 11:57:53 crc kubenswrapper[4721]: E1202 11:57:53.774226 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a704275-5d23-4f38-805d-e0f8be9b50dc" containerName="extract-content" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774231 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a704275-5d23-4f38-805d-e0f8be9b50dc" containerName="extract-content" Dec 02 11:57:53 crc kubenswrapper[4721]: E1202 11:57:53.774241 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94f45fa8-3517-4376-b5c0-8d6cf0254027" containerName="extract-utilities" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774247 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="94f45fa8-3517-4376-b5c0-8d6cf0254027" containerName="extract-utilities" Dec 02 11:57:53 crc kubenswrapper[4721]: E1202 11:57:53.774257 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44" containerName="extract-content" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774262 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44" containerName="extract-content" Dec 02 11:57:53 crc kubenswrapper[4721]: E1202 11:57:53.774270 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1e35ec2-7693-4103-97e1-3304b8ac93f4" containerName="extract-utilities" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774276 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1e35ec2-7693-4103-97e1-3304b8ac93f4" containerName="extract-utilities" Dec 02 11:57:53 crc kubenswrapper[4721]: E1202 11:57:53.774284 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94f45fa8-3517-4376-b5c0-8d6cf0254027" containerName="registry-server" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774289 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="94f45fa8-3517-4376-b5c0-8d6cf0254027" containerName="registry-server" Dec 02 11:57:53 crc kubenswrapper[4721]: E1202 11:57:53.774298 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a704275-5d23-4f38-805d-e0f8be9b50dc" containerName="registry-server" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774303 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a704275-5d23-4f38-805d-e0f8be9b50dc" containerName="registry-server" Dec 02 11:57:53 crc kubenswrapper[4721]: E1202 11:57:53.774326 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5754f3db-279a-4340-9354-8709b29e0398" containerName="marketplace-operator" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774332 4721 
state_mem.go:107] "Deleted CPUSet assignment" podUID="5754f3db-279a-4340-9354-8709b29e0398" containerName="marketplace-operator" Dec 02 11:57:53 crc kubenswrapper[4721]: E1202 11:57:53.774357 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44" containerName="extract-utilities" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774363 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44" containerName="extract-utilities" Dec 02 11:57:53 crc kubenswrapper[4721]: E1202 11:57:53.774372 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a704275-5d23-4f38-805d-e0f8be9b50dc" containerName="extract-utilities" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774377 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a704275-5d23-4f38-805d-e0f8be9b50dc" containerName="extract-utilities" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774478 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="5754f3db-279a-4340-9354-8709b29e0398" containerName="marketplace-operator" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774490 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="94f45fa8-3517-4376-b5c0-8d6cf0254027" containerName="registry-server" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774501 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1e35ec2-7693-4103-97e1-3304b8ac93f4" containerName="registry-server" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774509 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a704275-5d23-4f38-805d-e0f8be9b50dc" containerName="registry-server" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774517 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e1d4456-c1ef-4ab1-9b58-5ae5b7f8ef44" containerName="registry-server" Dec 02 11:57:53 crc kubenswrapper[4721]: E1202 11:57:53.774600 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5754f3db-279a-4340-9354-8709b29e0398" containerName="marketplace-operator" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774607 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5754f3db-279a-4340-9354-8709b29e0398" containerName="marketplace-operator" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.774679 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="5754f3db-279a-4340-9354-8709b29e0398" containerName="marketplace-operator" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.775146 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-692sp" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.779031 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.782099 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-692sp"] Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.828268 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94263503-850e-479a-9335-8036ea37b30f-utilities\") pod \"redhat-marketplace-692sp\" (UID: \"94263503-850e-479a-9335-8036ea37b30f\") " pod="openshift-marketplace/redhat-marketplace-692sp" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.828337 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94263503-850e-479a-9335-8036ea37b30f-catalog-content\") pod \"redhat-marketplace-692sp\" (UID: \"94263503-850e-479a-9335-8036ea37b30f\") " pod="openshift-marketplace/redhat-marketplace-692sp" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.828375 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8skv\" (UniqueName: \"kubernetes.io/projected/94263503-850e-479a-9335-8036ea37b30f-kube-api-access-w8skv\") pod \"redhat-marketplace-692sp\" (UID: \"94263503-850e-479a-9335-8036ea37b30f\") " pod="openshift-marketplace/redhat-marketplace-692sp" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.929332 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94263503-850e-479a-9335-8036ea37b30f-utilities\") pod \"redhat-marketplace-692sp\" (UID: \"94263503-850e-479a-9335-8036ea37b30f\") " pod="openshift-marketplace/redhat-marketplace-692sp" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.929488 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94263503-850e-479a-9335-8036ea37b30f-catalog-content\") pod \"redhat-marketplace-692sp\" (UID: \"94263503-850e-479a-9335-8036ea37b30f\") " pod="openshift-marketplace/redhat-marketplace-692sp" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.929539 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8skv\" (UniqueName: \"kubernetes.io/projected/94263503-850e-479a-9335-8036ea37b30f-kube-api-access-w8skv\") pod \"redhat-marketplace-692sp\" (UID: \"94263503-850e-479a-9335-8036ea37b30f\") " pod="openshift-marketplace/redhat-marketplace-692sp" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.930637 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94263503-850e-479a-9335-8036ea37b30f-utilities\") pod \"redhat-marketplace-692sp\" (UID: \"94263503-850e-479a-9335-8036ea37b30f\") " pod="openshift-marketplace/redhat-marketplace-692sp" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.931043 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94263503-850e-479a-9335-8036ea37b30f-catalog-content\") pod \"redhat-marketplace-692sp\" (UID: 
\"94263503-850e-479a-9335-8036ea37b30f\") " pod="openshift-marketplace/redhat-marketplace-692sp" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.955396 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8skv\" (UniqueName: \"kubernetes.io/projected/94263503-850e-479a-9335-8036ea37b30f-kube-api-access-w8skv\") pod \"redhat-marketplace-692sp\" (UID: \"94263503-850e-479a-9335-8036ea37b30f\") " pod="openshift-marketplace/redhat-marketplace-692sp" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.980432 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fd78g"] Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.981671 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fd78g" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.984375 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 02 11:57:53 crc kubenswrapper[4721]: I1202 11:57:53.992438 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fd78g"] Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.030935 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e586d5d2-126d-4886-af16-793d937c0eaf-utilities\") pod \"redhat-operators-fd78g\" (UID: \"e586d5d2-126d-4886-af16-793d937c0eaf\") " pod="openshift-marketplace/redhat-operators-fd78g" Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.030994 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qxkt\" (UniqueName: \"kubernetes.io/projected/e586d5d2-126d-4886-af16-793d937c0eaf-kube-api-access-5qxkt\") pod \"redhat-operators-fd78g\" (UID: \"e586d5d2-126d-4886-af16-793d937c0eaf\") " pod="openshift-marketplace/redhat-operators-fd78g" Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.031023 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e586d5d2-126d-4886-af16-793d937c0eaf-catalog-content\") pod \"redhat-operators-fd78g\" (UID: \"e586d5d2-126d-4886-af16-793d937c0eaf\") " pod="openshift-marketplace/redhat-operators-fd78g" Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.113004 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-692sp" Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.132260 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e586d5d2-126d-4886-af16-793d937c0eaf-utilities\") pod \"redhat-operators-fd78g\" (UID: \"e586d5d2-126d-4886-af16-793d937c0eaf\") " pod="openshift-marketplace/redhat-operators-fd78g" Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.132315 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qxkt\" (UniqueName: \"kubernetes.io/projected/e586d5d2-126d-4886-af16-793d937c0eaf-kube-api-access-5qxkt\") pod \"redhat-operators-fd78g\" (UID: \"e586d5d2-126d-4886-af16-793d937c0eaf\") " pod="openshift-marketplace/redhat-operators-fd78g" Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.132384 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e586d5d2-126d-4886-af16-793d937c0eaf-catalog-content\") pod \"redhat-operators-fd78g\" (UID: \"e586d5d2-126d-4886-af16-793d937c0eaf\") " pod="openshift-marketplace/redhat-operators-fd78g" Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.132817 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e586d5d2-126d-4886-af16-793d937c0eaf-catalog-content\") pod \"redhat-operators-fd78g\" (UID: \"e586d5d2-126d-4886-af16-793d937c0eaf\") " pod="openshift-marketplace/redhat-operators-fd78g" Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.132884 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e586d5d2-126d-4886-af16-793d937c0eaf-utilities\") pod \"redhat-operators-fd78g\" (UID: \"e586d5d2-126d-4886-af16-793d937c0eaf\") " pod="openshift-marketplace/redhat-operators-fd78g" Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.154019 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qxkt\" (UniqueName: \"kubernetes.io/projected/e586d5d2-126d-4886-af16-793d937c0eaf-kube-api-access-5qxkt\") pod \"redhat-operators-fd78g\" (UID: \"e586d5d2-126d-4886-af16-793d937c0eaf\") " pod="openshift-marketplace/redhat-operators-fd78g" Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.294171 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-692sp"] Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.310063 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fd78g" Dec 02 11:57:54 crc kubenswrapper[4721]: W1202 11:57:54.313380 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94263503_850e_479a_9335_8036ea37b30f.slice/crio-996af0a011288a58dcf88e19ed84b1ef429cd65a9cc34f13c436c8ca15a09146 WatchSource:0}: Error finding container 996af0a011288a58dcf88e19ed84b1ef429cd65a9cc34f13c436c8ca15a09146: Status 404 returned error can't find the container with id 996af0a011288a58dcf88e19ed84b1ef429cd65a9cc34f13c436c8ca15a09146 Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.458584 4721 generic.go:334] "Generic (PLEG): container finished" podID="94263503-850e-479a-9335-8036ea37b30f" containerID="5f91aea5617bdcaf2b309cd6d4a7cdf4b9d9880528fa29e1b2f008028d49d0f2" exitCode=0 Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.459605 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-692sp" event={"ID":"94263503-850e-479a-9335-8036ea37b30f","Type":"ContainerDied","Data":"5f91aea5617bdcaf2b309cd6d4a7cdf4b9d9880528fa29e1b2f008028d49d0f2"} Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.459681 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-692sp" event={"ID":"94263503-850e-479a-9335-8036ea37b30f","Type":"ContainerStarted","Data":"996af0a011288a58dcf88e19ed84b1ef429cd65a9cc34f13c436c8ca15a09146"} Dec 02 11:57:54 crc kubenswrapper[4721]: I1202 11:57:54.470807 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fd78g"] Dec 02 11:57:54 crc kubenswrapper[4721]: W1202 11:57:54.502129 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode586d5d2_126d_4886_af16_793d937c0eaf.slice/crio-67aa717d2a18f076e0a0eb8d636de10e16fcfc12595ed63865040f4e51f362a9 WatchSource:0}: Error finding container 67aa717d2a18f076e0a0eb8d636de10e16fcfc12595ed63865040f4e51f362a9: Status 404 returned error can't find the container with id 67aa717d2a18f076e0a0eb8d636de10e16fcfc12595ed63865040f4e51f362a9 Dec 02 11:57:55 crc kubenswrapper[4721]: I1202 11:57:55.465414 4721 generic.go:334] "Generic (PLEG): container finished" podID="94263503-850e-479a-9335-8036ea37b30f" containerID="716cd9ec3b31f7d95c46636efdf4d130073bf9997c8028b8061fcc53f3c9a9a3" exitCode=0 Dec 02 11:57:55 crc kubenswrapper[4721]: I1202 11:57:55.465473 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-692sp" event={"ID":"94263503-850e-479a-9335-8036ea37b30f","Type":"ContainerDied","Data":"716cd9ec3b31f7d95c46636efdf4d130073bf9997c8028b8061fcc53f3c9a9a3"} Dec 02 11:57:55 crc kubenswrapper[4721]: I1202 11:57:55.468292 4721 generic.go:334] "Generic (PLEG): container finished" podID="e586d5d2-126d-4886-af16-793d937c0eaf" containerID="1179fc2a4e8d1384cac04a8998daddb9a07333c9e29a324bb6c0feaf24f2c47f" exitCode=0 Dec 02 11:57:55 crc kubenswrapper[4721]: I1202 11:57:55.468391 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fd78g" event={"ID":"e586d5d2-126d-4886-af16-793d937c0eaf","Type":"ContainerDied","Data":"1179fc2a4e8d1384cac04a8998daddb9a07333c9e29a324bb6c0feaf24f2c47f"} Dec 02 11:57:55 crc kubenswrapper[4721]: I1202 11:57:55.468425 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fd78g" 
event={"ID":"e586d5d2-126d-4886-af16-793d937c0eaf","Type":"ContainerStarted","Data":"67aa717d2a18f076e0a0eb8d636de10e16fcfc12595ed63865040f4e51f362a9"} Dec 02 11:57:55 crc kubenswrapper[4721]: I1202 11:57:55.702557 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 11:57:55 crc kubenswrapper[4721]: I1202 11:57:55.702612 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.175367 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wwzsl"] Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.176580 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wwzsl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.178829 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.196942 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wwzsl"] Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.357382 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gznd\" (UniqueName: \"kubernetes.io/projected/0cd807a4-666e-4d8e-937d-bddbe10b5587-kube-api-access-5gznd\") pod \"certified-operators-wwzsl\" (UID: \"0cd807a4-666e-4d8e-937d-bddbe10b5587\") " pod="openshift-marketplace/certified-operators-wwzsl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.357489 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cd807a4-666e-4d8e-937d-bddbe10b5587-catalog-content\") pod \"certified-operators-wwzsl\" (UID: \"0cd807a4-666e-4d8e-937d-bddbe10b5587\") " pod="openshift-marketplace/certified-operators-wwzsl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.357534 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cd807a4-666e-4d8e-937d-bddbe10b5587-utilities\") pod \"certified-operators-wwzsl\" (UID: \"0cd807a4-666e-4d8e-937d-bddbe10b5587\") " pod="openshift-marketplace/certified-operators-wwzsl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.367538 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-k95pl"] Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.368459 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-k95pl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.375309 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.378957 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k95pl"] Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.458648 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0efa73b-b118-4069-a168-bb26db513ef7-utilities\") pod \"community-operators-k95pl\" (UID: \"f0efa73b-b118-4069-a168-bb26db513ef7\") " pod="openshift-marketplace/community-operators-k95pl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.459002 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0efa73b-b118-4069-a168-bb26db513ef7-catalog-content\") pod \"community-operators-k95pl\" (UID: \"f0efa73b-b118-4069-a168-bb26db513ef7\") " pod="openshift-marketplace/community-operators-k95pl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.459050 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94z98\" (UniqueName: \"kubernetes.io/projected/f0efa73b-b118-4069-a168-bb26db513ef7-kube-api-access-94z98\") pod \"community-operators-k95pl\" (UID: \"f0efa73b-b118-4069-a168-bb26db513ef7\") " pod="openshift-marketplace/community-operators-k95pl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.459080 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cd807a4-666e-4d8e-937d-bddbe10b5587-catalog-content\") pod \"certified-operators-wwzsl\" (UID: \"0cd807a4-666e-4d8e-937d-bddbe10b5587\") " pod="openshift-marketplace/certified-operators-wwzsl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.459162 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cd807a4-666e-4d8e-937d-bddbe10b5587-utilities\") pod \"certified-operators-wwzsl\" (UID: \"0cd807a4-666e-4d8e-937d-bddbe10b5587\") " pod="openshift-marketplace/certified-operators-wwzsl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.459217 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gznd\" (UniqueName: \"kubernetes.io/projected/0cd807a4-666e-4d8e-937d-bddbe10b5587-kube-api-access-5gznd\") pod \"certified-operators-wwzsl\" (UID: \"0cd807a4-666e-4d8e-937d-bddbe10b5587\") " pod="openshift-marketplace/certified-operators-wwzsl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.459513 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cd807a4-666e-4d8e-937d-bddbe10b5587-catalog-content\") pod \"certified-operators-wwzsl\" (UID: \"0cd807a4-666e-4d8e-937d-bddbe10b5587\") " pod="openshift-marketplace/certified-operators-wwzsl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.459724 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cd807a4-666e-4d8e-937d-bddbe10b5587-utilities\") pod \"certified-operators-wwzsl\" (UID: 
\"0cd807a4-666e-4d8e-937d-bddbe10b5587\") " pod="openshift-marketplace/certified-operators-wwzsl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.474379 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fd78g" event={"ID":"e586d5d2-126d-4886-af16-793d937c0eaf","Type":"ContainerStarted","Data":"3e0b4410caff08b9f8868dc5d0c66746984085536f7c1623541bb43dfcf75f26"} Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.477280 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gznd\" (UniqueName: \"kubernetes.io/projected/0cd807a4-666e-4d8e-937d-bddbe10b5587-kube-api-access-5gznd\") pod \"certified-operators-wwzsl\" (UID: \"0cd807a4-666e-4d8e-937d-bddbe10b5587\") " pod="openshift-marketplace/certified-operators-wwzsl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.477500 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-692sp" event={"ID":"94263503-850e-479a-9335-8036ea37b30f","Type":"ContainerStarted","Data":"e521b66841b69ae93e66a47a2dedf2a56310155b43523ee8e6420fa746749db0"} Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.504533 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-692sp" podStartSLOduration=1.985359713 podStartE2EDuration="3.50451571s" podCreationTimestamp="2025-12-02 11:57:53 +0000 UTC" firstStartedPulling="2025-12-02 11:57:54.46053998 +0000 UTC m=+404.199090329" lastFinishedPulling="2025-12-02 11:57:55.979695977 +0000 UTC m=+405.718246326" observedRunningTime="2025-12-02 11:57:56.502703865 +0000 UTC m=+406.241254214" watchObservedRunningTime="2025-12-02 11:57:56.50451571 +0000 UTC m=+406.243066059" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.514663 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wwzsl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.559840 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94z98\" (UniqueName: \"kubernetes.io/projected/f0efa73b-b118-4069-a168-bb26db513ef7-kube-api-access-94z98\") pod \"community-operators-k95pl\" (UID: \"f0efa73b-b118-4069-a168-bb26db513ef7\") " pod="openshift-marketplace/community-operators-k95pl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.559974 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0efa73b-b118-4069-a168-bb26db513ef7-utilities\") pod \"community-operators-k95pl\" (UID: \"f0efa73b-b118-4069-a168-bb26db513ef7\") " pod="openshift-marketplace/community-operators-k95pl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.559998 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0efa73b-b118-4069-a168-bb26db513ef7-catalog-content\") pod \"community-operators-k95pl\" (UID: \"f0efa73b-b118-4069-a168-bb26db513ef7\") " pod="openshift-marketplace/community-operators-k95pl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.560511 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0efa73b-b118-4069-a168-bb26db513ef7-utilities\") pod \"community-operators-k95pl\" (UID: \"f0efa73b-b118-4069-a168-bb26db513ef7\") " pod="openshift-marketplace/community-operators-k95pl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.560546 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0efa73b-b118-4069-a168-bb26db513ef7-catalog-content\") pod \"community-operators-k95pl\" (UID: \"f0efa73b-b118-4069-a168-bb26db513ef7\") " pod="openshift-marketplace/community-operators-k95pl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.580760 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94z98\" (UniqueName: \"kubernetes.io/projected/f0efa73b-b118-4069-a168-bb26db513ef7-kube-api-access-94z98\") pod \"community-operators-k95pl\" (UID: \"f0efa73b-b118-4069-a168-bb26db513ef7\") " pod="openshift-marketplace/community-operators-k95pl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.686825 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-k95pl" Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.873667 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k95pl"] Dec 02 11:57:56 crc kubenswrapper[4721]: W1202 11:57:56.879425 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf0efa73b_b118_4069_a168_bb26db513ef7.slice/crio-b996be8d804569a11f1865dddde3138f9e723cb1bfccb6cc61f0a78bf17f60db WatchSource:0}: Error finding container b996be8d804569a11f1865dddde3138f9e723cb1bfccb6cc61f0a78bf17f60db: Status 404 returned error can't find the container with id b996be8d804569a11f1865dddde3138f9e723cb1bfccb6cc61f0a78bf17f60db Dec 02 11:57:56 crc kubenswrapper[4721]: I1202 11:57:56.914899 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wwzsl"] Dec 02 11:57:57 crc kubenswrapper[4721]: W1202 11:57:57.003502 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0cd807a4_666e_4d8e_937d_bddbe10b5587.slice/crio-db5883babc7a6fc08e67214582d15f6dd96131fda928d374974f77cdc2a07da0 WatchSource:0}: Error finding container db5883babc7a6fc08e67214582d15f6dd96131fda928d374974f77cdc2a07da0: Status 404 returned error can't find the container with id db5883babc7a6fc08e67214582d15f6dd96131fda928d374974f77cdc2a07da0 Dec 02 11:57:57 crc kubenswrapper[4721]: I1202 11:57:57.483371 4721 generic.go:334] "Generic (PLEG): container finished" podID="e586d5d2-126d-4886-af16-793d937c0eaf" containerID="3e0b4410caff08b9f8868dc5d0c66746984085536f7c1623541bb43dfcf75f26" exitCode=0 Dec 02 11:57:57 crc kubenswrapper[4721]: I1202 11:57:57.483484 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fd78g" event={"ID":"e586d5d2-126d-4886-af16-793d937c0eaf","Type":"ContainerDied","Data":"3e0b4410caff08b9f8868dc5d0c66746984085536f7c1623541bb43dfcf75f26"} Dec 02 11:57:57 crc kubenswrapper[4721]: I1202 11:57:57.485962 4721 generic.go:334] "Generic (PLEG): container finished" podID="0cd807a4-666e-4d8e-937d-bddbe10b5587" containerID="26fbc9503d0d198bd1dc90c4c7dc7c499e167bfa9d2124ae9e156f6c1ed71997" exitCode=0 Dec 02 11:57:57 crc kubenswrapper[4721]: I1202 11:57:57.486027 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwzsl" event={"ID":"0cd807a4-666e-4d8e-937d-bddbe10b5587","Type":"ContainerDied","Data":"26fbc9503d0d198bd1dc90c4c7dc7c499e167bfa9d2124ae9e156f6c1ed71997"} Dec 02 11:57:57 crc kubenswrapper[4721]: I1202 11:57:57.486047 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwzsl" event={"ID":"0cd807a4-666e-4d8e-937d-bddbe10b5587","Type":"ContainerStarted","Data":"db5883babc7a6fc08e67214582d15f6dd96131fda928d374974f77cdc2a07da0"} Dec 02 11:57:57 crc kubenswrapper[4721]: I1202 11:57:57.495855 4721 generic.go:334] "Generic (PLEG): container finished" podID="f0efa73b-b118-4069-a168-bb26db513ef7" containerID="147ef7d4fcfdeb18101605bc70594ba77f68232b8d65f07b67e54d2e915888d0" exitCode=0 Dec 02 11:57:57 crc kubenswrapper[4721]: I1202 11:57:57.495965 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k95pl" event={"ID":"f0efa73b-b118-4069-a168-bb26db513ef7","Type":"ContainerDied","Data":"147ef7d4fcfdeb18101605bc70594ba77f68232b8d65f07b67e54d2e915888d0"} 
Dec 02 11:57:57 crc kubenswrapper[4721]: I1202 11:57:57.496007 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k95pl" event={"ID":"f0efa73b-b118-4069-a168-bb26db513ef7","Type":"ContainerStarted","Data":"b996be8d804569a11f1865dddde3138f9e723cb1bfccb6cc61f0a78bf17f60db"} Dec 02 11:57:58 crc kubenswrapper[4721]: I1202 11:57:58.588559 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" podUID="ed8a3365-c6aa-472a-bb41-10905fcabbf6" containerName="registry" containerID="cri-o://c0c3683e38762dd5217e7f03057453a9ae51f782d3c88073a73f8a70cbc08b41" gracePeriod=30 Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.520200 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fd78g" event={"ID":"e586d5d2-126d-4886-af16-793d937c0eaf","Type":"ContainerStarted","Data":"e2b143c7fbd6d693fa1b1185d7e56ca6b55a294b03d6a91d808c154ef69e71d7"} Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.522492 4721 generic.go:334] "Generic (PLEG): container finished" podID="ed8a3365-c6aa-472a-bb41-10905fcabbf6" containerID="c0c3683e38762dd5217e7f03057453a9ae51f782d3c88073a73f8a70cbc08b41" exitCode=0 Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.522517 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" event={"ID":"ed8a3365-c6aa-472a-bb41-10905fcabbf6","Type":"ContainerDied","Data":"c0c3683e38762dd5217e7f03057453a9ae51f782d3c88073a73f8a70cbc08b41"} Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.522531 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" event={"ID":"ed8a3365-c6aa-472a-bb41-10905fcabbf6","Type":"ContainerDied","Data":"ee0334c1c87e68032d31064a6bdd2bfc571483eaf0e1f7527f989688f8a4be8f"} Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.522542 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee0334c1c87e68032d31064a6bdd2bfc571483eaf0e1f7527f989688f8a4be8f" Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.538140 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fd78g" podStartSLOduration=3.620718459 podStartE2EDuration="6.538125339s" podCreationTimestamp="2025-12-02 11:57:53 +0000 UTC" firstStartedPulling="2025-12-02 11:57:55.474738471 +0000 UTC m=+405.213288820" lastFinishedPulling="2025-12-02 11:57:58.392145351 +0000 UTC m=+408.130695700" observedRunningTime="2025-12-02 11:57:59.534989263 +0000 UTC m=+409.273539632" watchObservedRunningTime="2025-12-02 11:57:59.538125339 +0000 UTC m=+409.276675688" Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.812156 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.926208 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ed8a3365-c6aa-472a-bb41-10905fcabbf6-ca-trust-extracted\") pod \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.926487 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ed8a3365-c6aa-472a-bb41-10905fcabbf6-registry-certificates\") pod \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.926621 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-bound-sa-token\") pod \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.926731 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qdks\" (UniqueName: \"kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-kube-api-access-5qdks\") pod \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.926838 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ed8a3365-c6aa-472a-bb41-10905fcabbf6-installation-pull-secrets\") pod \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.926942 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ed8a3365-c6aa-472a-bb41-10905fcabbf6-trusted-ca\") pod \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.927067 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-registry-tls\") pod \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.927309 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\" (UID: \"ed8a3365-c6aa-472a-bb41-10905fcabbf6\") " Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.927310 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed8a3365-c6aa-472a-bb41-10905fcabbf6-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "ed8a3365-c6aa-472a-bb41-10905fcabbf6" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.927449 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed8a3365-c6aa-472a-bb41-10905fcabbf6-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "ed8a3365-c6aa-472a-bb41-10905fcabbf6" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.927885 4721 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ed8a3365-c6aa-472a-bb41-10905fcabbf6-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.927963 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ed8a3365-c6aa-472a-bb41-10905fcabbf6-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.933873 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed8a3365-c6aa-472a-bb41-10905fcabbf6-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "ed8a3365-c6aa-472a-bb41-10905fcabbf6" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.942452 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "ed8a3365-c6aa-472a-bb41-10905fcabbf6" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.950192 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed8a3365-c6aa-472a-bb41-10905fcabbf6-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "ed8a3365-c6aa-472a-bb41-10905fcabbf6" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.951523 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "ed8a3365-c6aa-472a-bb41-10905fcabbf6" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.951792 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "ed8a3365-c6aa-472a-bb41-10905fcabbf6" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:57:59 crc kubenswrapper[4721]: I1202 11:57:59.952087 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-kube-api-access-5qdks" (OuterVolumeSpecName: "kube-api-access-5qdks") pod "ed8a3365-c6aa-472a-bb41-10905fcabbf6" (UID: "ed8a3365-c6aa-472a-bb41-10905fcabbf6"). InnerVolumeSpecName "kube-api-access-5qdks". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:58:00 crc kubenswrapper[4721]: I1202 11:58:00.029163 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qdks\" (UniqueName: \"kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-kube-api-access-5qdks\") on node \"crc\" DevicePath \"\"" Dec 02 11:58:00 crc kubenswrapper[4721]: I1202 11:58:00.029205 4721 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ed8a3365-c6aa-472a-bb41-10905fcabbf6-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 02 11:58:00 crc kubenswrapper[4721]: I1202 11:58:00.029215 4721 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 02 11:58:00 crc kubenswrapper[4721]: I1202 11:58:00.029224 4721 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ed8a3365-c6aa-472a-bb41-10905fcabbf6-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 02 11:58:00 crc kubenswrapper[4721]: I1202 11:58:00.029232 4721 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ed8a3365-c6aa-472a-bb41-10905fcabbf6-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 11:58:00 crc kubenswrapper[4721]: I1202 11:58:00.529501 4721 generic.go:334] "Generic (PLEG): container finished" podID="0cd807a4-666e-4d8e-937d-bddbe10b5587" containerID="dce4bb47606d6583a3b71933ffdf18fcc8dabfdea69aa5ac8792c13fb1e0fb24" exitCode=0 Dec 02 11:58:00 crc kubenswrapper[4721]: I1202 11:58:00.529609 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwzsl" event={"ID":"0cd807a4-666e-4d8e-937d-bddbe10b5587","Type":"ContainerDied","Data":"dce4bb47606d6583a3b71933ffdf18fcc8dabfdea69aa5ac8792c13fb1e0fb24"} Dec 02 11:58:00 crc kubenswrapper[4721]: I1202 11:58:00.532775 4721 generic.go:334] "Generic (PLEG): container finished" podID="f0efa73b-b118-4069-a168-bb26db513ef7" containerID="16f271e82018df2d56d06e371594f26063be4b91e24eb29ef1d3da793ef74aa3" exitCode=0 Dec 02 11:58:00 crc kubenswrapper[4721]: I1202 11:58:00.532835 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k95pl" event={"ID":"f0efa73b-b118-4069-a168-bb26db513ef7","Type":"ContainerDied","Data":"16f271e82018df2d56d06e371594f26063be4b91e24eb29ef1d3da793ef74aa3"} Dec 02 11:58:00 crc kubenswrapper[4721]: I1202 11:58:00.532877 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-tkr2s" Dec 02 11:58:00 crc kubenswrapper[4721]: I1202 11:58:00.576578 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tkr2s"] Dec 02 11:58:00 crc kubenswrapper[4721]: I1202 11:58:00.585669 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-tkr2s"] Dec 02 11:58:00 crc kubenswrapper[4721]: I1202 11:58:00.848126 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed8a3365-c6aa-472a-bb41-10905fcabbf6" path="/var/lib/kubelet/pods/ed8a3365-c6aa-472a-bb41-10905fcabbf6/volumes" Dec 02 11:58:02 crc kubenswrapper[4721]: I1202 11:58:02.544948 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k95pl" event={"ID":"f0efa73b-b118-4069-a168-bb26db513ef7","Type":"ContainerStarted","Data":"d9ca95165f64476c2433a525cb69c76084e4f1f9b6a72ed69873a8c81f8abc91"} Dec 02 11:58:02 crc kubenswrapper[4721]: I1202 11:58:02.548578 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wwzsl" event={"ID":"0cd807a4-666e-4d8e-937d-bddbe10b5587","Type":"ContainerStarted","Data":"88c528cb646f763d1656d2699c9ec8ce76a12ae95dd19727f4ff21d28307b548"} Dec 02 11:58:02 crc kubenswrapper[4721]: I1202 11:58:02.572225 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-k95pl" podStartSLOduration=2.742087989 podStartE2EDuration="6.572209333s" podCreationTimestamp="2025-12-02 11:57:56 +0000 UTC" firstStartedPulling="2025-12-02 11:57:57.500926986 +0000 UTC m=+407.239477335" lastFinishedPulling="2025-12-02 11:58:01.33104833 +0000 UTC m=+411.069598679" observedRunningTime="2025-12-02 11:58:02.571175481 +0000 UTC m=+412.309725840" watchObservedRunningTime="2025-12-02 11:58:02.572209333 +0000 UTC m=+412.310759682" Dec 02 11:58:02 crc kubenswrapper[4721]: I1202 11:58:02.592233 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wwzsl" podStartSLOduration=2.779968561 podStartE2EDuration="6.592216426s" podCreationTimestamp="2025-12-02 11:57:56 +0000 UTC" firstStartedPulling="2025-12-02 11:57:57.488417114 +0000 UTC m=+407.226967463" lastFinishedPulling="2025-12-02 11:58:01.300664989 +0000 UTC m=+411.039215328" observedRunningTime="2025-12-02 11:58:02.589184703 +0000 UTC m=+412.327735062" watchObservedRunningTime="2025-12-02 11:58:02.592216426 +0000 UTC m=+412.330766775" Dec 02 11:58:04 crc kubenswrapper[4721]: I1202 11:58:04.113693 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-692sp" Dec 02 11:58:04 crc kubenswrapper[4721]: I1202 11:58:04.114096 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-692sp" Dec 02 11:58:04 crc kubenswrapper[4721]: I1202 11:58:04.164171 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-692sp" Dec 02 11:58:04 crc kubenswrapper[4721]: I1202 11:58:04.310917 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fd78g" Dec 02 11:58:04 crc kubenswrapper[4721]: I1202 11:58:04.310968 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fd78g" Dec 
02 11:58:04 crc kubenswrapper[4721]: I1202 11:58:04.355249 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fd78g"
Dec 02 11:58:04 crc kubenswrapper[4721]: I1202 11:58:04.594816 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fd78g"
Dec 02 11:58:04 crc kubenswrapper[4721]: I1202 11:58:04.596749 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-692sp"
Dec 02 11:58:06 crc kubenswrapper[4721]: I1202 11:58:06.515769 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wwzsl"
Dec 02 11:58:06 crc kubenswrapper[4721]: I1202 11:58:06.516077 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wwzsl"
Dec 02 11:58:06 crc kubenswrapper[4721]: I1202 11:58:06.553323 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wwzsl"
Dec 02 11:58:06 crc kubenswrapper[4721]: I1202 11:58:06.603338 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wwzsl"
Dec 02 11:58:06 crc kubenswrapper[4721]: I1202 11:58:06.687958 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-k95pl"
Dec 02 11:58:06 crc kubenswrapper[4721]: I1202 11:58:06.688299 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-k95pl"
Dec 02 11:58:06 crc kubenswrapper[4721]: I1202 11:58:06.730886 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-k95pl"
Dec 02 11:58:07 crc kubenswrapper[4721]: I1202 11:58:07.607224 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-k95pl"
Dec 02 11:58:25 crc kubenswrapper[4721]: I1202 11:58:25.702871 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 11:58:25 crc kubenswrapper[4721]: I1202 11:58:25.703433 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 11:58:25 crc kubenswrapper[4721]: I1202 11:58:25.703490 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s"
Dec 02 11:58:25 crc kubenswrapper[4721]: I1202 11:58:25.704150 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"af0d455046abe7542ae5be55dc88c0a9b4a1f4a60770dc97714ed00134bf7dd9"} pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 11:58:25 crc kubenswrapper[4721]: I1202 11:58:25.704215 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" containerID="cri-o://af0d455046abe7542ae5be55dc88c0a9b4a1f4a60770dc97714ed00134bf7dd9" gracePeriod=600
Dec 02 11:58:26 crc kubenswrapper[4721]: I1202 11:58:26.672792 4721 generic.go:334] "Generic (PLEG): container finished" podID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerID="af0d455046abe7542ae5be55dc88c0a9b4a1f4a60770dc97714ed00134bf7dd9" exitCode=0
Dec 02 11:58:26 crc kubenswrapper[4721]: I1202 11:58:26.672911 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerDied","Data":"af0d455046abe7542ae5be55dc88c0a9b4a1f4a60770dc97714ed00134bf7dd9"}
Dec 02 11:58:26 crc kubenswrapper[4721]: I1202 11:58:26.673390 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerStarted","Data":"2b5a3e04a093afd734df76e21cc539ac2697b5e275c6cc77be5a62e443752e2f"}
Dec 02 11:58:26 crc kubenswrapper[4721]: I1202 11:58:26.673407 4721 scope.go:117] "RemoveContainer" containerID="408dcbe697c9d5042d772604903626107c292e10ba418c2ff7b66079e214eeff"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.181452 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"]
Dec 02 12:00:00 crc kubenswrapper[4721]: E1202 12:00:00.182309 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed8a3365-c6aa-472a-bb41-10905fcabbf6" containerName="registry"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.182328 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed8a3365-c6aa-472a-bb41-10905fcabbf6" containerName="registry"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.182666 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed8a3365-c6aa-472a-bb41-10905fcabbf6" containerName="registry"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.183129 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.185246 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.186671 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.194676 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"]
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.303500 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbz7c\" (UniqueName: \"kubernetes.io/projected/9bf185fe-0c7d-448a-bfdc-f97c66b04870-kube-api-access-pbz7c\") pod \"collect-profiles-29411280-tst2l\" (UID: \"9bf185fe-0c7d-448a-bfdc-f97c66b04870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.303556 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9bf185fe-0c7d-448a-bfdc-f97c66b04870-secret-volume\") pod \"collect-profiles-29411280-tst2l\" (UID: \"9bf185fe-0c7d-448a-bfdc-f97c66b04870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.303583 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9bf185fe-0c7d-448a-bfdc-f97c66b04870-config-volume\") pod \"collect-profiles-29411280-tst2l\" (UID: \"9bf185fe-0c7d-448a-bfdc-f97c66b04870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.404149 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9bf185fe-0c7d-448a-bfdc-f97c66b04870-config-volume\") pod \"collect-profiles-29411280-tst2l\" (UID: \"9bf185fe-0c7d-448a-bfdc-f97c66b04870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.404248 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbz7c\" (UniqueName: \"kubernetes.io/projected/9bf185fe-0c7d-448a-bfdc-f97c66b04870-kube-api-access-pbz7c\") pod \"collect-profiles-29411280-tst2l\" (UID: \"9bf185fe-0c7d-448a-bfdc-f97c66b04870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.404276 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9bf185fe-0c7d-448a-bfdc-f97c66b04870-secret-volume\") pod \"collect-profiles-29411280-tst2l\" (UID: \"9bf185fe-0c7d-448a-bfdc-f97c66b04870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.405195 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9bf185fe-0c7d-448a-bfdc-f97c66b04870-config-volume\") pod \"collect-profiles-29411280-tst2l\" (UID: \"9bf185fe-0c7d-448a-bfdc-f97c66b04870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.410910 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9bf185fe-0c7d-448a-bfdc-f97c66b04870-secret-volume\") pod \"collect-profiles-29411280-tst2l\" (UID: \"9bf185fe-0c7d-448a-bfdc-f97c66b04870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.419565 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbz7c\" (UniqueName: \"kubernetes.io/projected/9bf185fe-0c7d-448a-bfdc-f97c66b04870-kube-api-access-pbz7c\") pod \"collect-profiles-29411280-tst2l\" (UID: \"9bf185fe-0c7d-448a-bfdc-f97c66b04870\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.503819 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"
Dec 02 12:00:00 crc kubenswrapper[4721]: I1202 12:00:00.682629 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"]
Dec 02 12:00:01 crc kubenswrapper[4721]: I1202 12:00:01.223292 4721 generic.go:334] "Generic (PLEG): container finished" podID="9bf185fe-0c7d-448a-bfdc-f97c66b04870" containerID="c6ba523319f856a9b962b1252cd02f36f6ded90af09d0ee35cf2ca890373f5ae" exitCode=0
Dec 02 12:00:01 crc kubenswrapper[4721]: I1202 12:00:01.223363 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l" event={"ID":"9bf185fe-0c7d-448a-bfdc-f97c66b04870","Type":"ContainerDied","Data":"c6ba523319f856a9b962b1252cd02f36f6ded90af09d0ee35cf2ca890373f5ae"}
Dec 02 12:00:01 crc kubenswrapper[4721]: I1202 12:00:01.223604 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l" event={"ID":"9bf185fe-0c7d-448a-bfdc-f97c66b04870","Type":"ContainerStarted","Data":"7a2ab4d77983631f7c8f67ae8dca0d505d5d5c49d7efa7a0702178f5bcbeef78"}
Dec 02 12:00:02 crc kubenswrapper[4721]: I1202 12:00:02.451107 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"
Dec 02 12:00:02 crc kubenswrapper[4721]: I1202 12:00:02.630804 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbz7c\" (UniqueName: \"kubernetes.io/projected/9bf185fe-0c7d-448a-bfdc-f97c66b04870-kube-api-access-pbz7c\") pod \"9bf185fe-0c7d-448a-bfdc-f97c66b04870\" (UID: \"9bf185fe-0c7d-448a-bfdc-f97c66b04870\") "
Dec 02 12:00:02 crc kubenswrapper[4721]: I1202 12:00:02.630890 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9bf185fe-0c7d-448a-bfdc-f97c66b04870-config-volume\") pod \"9bf185fe-0c7d-448a-bfdc-f97c66b04870\" (UID: \"9bf185fe-0c7d-448a-bfdc-f97c66b04870\") "
Dec 02 12:00:02 crc kubenswrapper[4721]: I1202 12:00:02.630932 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9bf185fe-0c7d-448a-bfdc-f97c66b04870-secret-volume\") pod \"9bf185fe-0c7d-448a-bfdc-f97c66b04870\" (UID: \"9bf185fe-0c7d-448a-bfdc-f97c66b04870\") "
Dec 02 12:00:02 crc kubenswrapper[4721]: I1202 12:00:02.632258 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9bf185fe-0c7d-448a-bfdc-f97c66b04870-config-volume" (OuterVolumeSpecName: "config-volume") pod "9bf185fe-0c7d-448a-bfdc-f97c66b04870" (UID: "9bf185fe-0c7d-448a-bfdc-f97c66b04870"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:00:02 crc kubenswrapper[4721]: I1202 12:00:02.636365 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bf185fe-0c7d-448a-bfdc-f97c66b04870-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9bf185fe-0c7d-448a-bfdc-f97c66b04870" (UID: "9bf185fe-0c7d-448a-bfdc-f97c66b04870"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:00:02 crc kubenswrapper[4721]: I1202 12:00:02.636577 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bf185fe-0c7d-448a-bfdc-f97c66b04870-kube-api-access-pbz7c" (OuterVolumeSpecName: "kube-api-access-pbz7c") pod "9bf185fe-0c7d-448a-bfdc-f97c66b04870" (UID: "9bf185fe-0c7d-448a-bfdc-f97c66b04870"). InnerVolumeSpecName "kube-api-access-pbz7c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:00:02 crc kubenswrapper[4721]: I1202 12:00:02.732878 4721 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9bf185fe-0c7d-448a-bfdc-f97c66b04870-config-volume\") on node \"crc\" DevicePath \"\""
Dec 02 12:00:02 crc kubenswrapper[4721]: I1202 12:00:02.732915 4721 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9bf185fe-0c7d-448a-bfdc-f97c66b04870-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 02 12:00:02 crc kubenswrapper[4721]: I1202 12:00:02.732931 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbz7c\" (UniqueName: \"kubernetes.io/projected/9bf185fe-0c7d-448a-bfdc-f97c66b04870-kube-api-access-pbz7c\") on node \"crc\" DevicePath \"\""
Dec 02 12:00:03 crc kubenswrapper[4721]: I1202 12:00:03.237135 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l" event={"ID":"9bf185fe-0c7d-448a-bfdc-f97c66b04870","Type":"ContainerDied","Data":"7a2ab4d77983631f7c8f67ae8dca0d505d5d5c49d7efa7a0702178f5bcbeef78"}
Dec 02 12:00:03 crc kubenswrapper[4721]: I1202 12:00:03.237179 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a2ab4d77983631f7c8f67ae8dca0d505d5d5c49d7efa7a0702178f5bcbeef78"
Dec 02 12:00:03 crc kubenswrapper[4721]: I1202 12:00:03.237224 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411280-tst2l"
Dec 02 12:00:10 crc kubenswrapper[4721]: I1202 12:00:10.997573 4721 scope.go:117] "RemoveContainer" containerID="c0c3683e38762dd5217e7f03057453a9ae51f782d3c88073a73f8a70cbc08b41"
Dec 02 12:00:25 crc kubenswrapper[4721]: I1202 12:00:25.703585 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 12:00:25 crc kubenswrapper[4721]: I1202 12:00:25.704118 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 12:00:55 crc kubenswrapper[4721]: I1202 12:00:55.703522 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 12:00:55 crc kubenswrapper[4721]: I1202 12:00:55.704305 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 12:01:25 crc kubenswrapper[4721]: I1202 12:01:25.702597 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 12:01:25 crc kubenswrapper[4721]: I1202 12:01:25.703132 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 12:01:25 crc kubenswrapper[4721]: I1202 12:01:25.703183 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s"
Dec 02 12:01:25 crc kubenswrapper[4721]: I1202 12:01:25.704024 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2b5a3e04a093afd734df76e21cc539ac2697b5e275c6cc77be5a62e443752e2f"} pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 12:01:25 crc kubenswrapper[4721]: I1202 12:01:25.704092 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" containerID="cri-o://2b5a3e04a093afd734df76e21cc539ac2697b5e275c6cc77be5a62e443752e2f" gracePeriod=600
Dec 02 12:01:26 crc kubenswrapper[4721]: I1202 12:01:26.715124 4721 generic.go:334] "Generic (PLEG): container finished" podID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerID="2b5a3e04a093afd734df76e21cc539ac2697b5e275c6cc77be5a62e443752e2f" exitCode=0
Dec 02 12:01:26 crc kubenswrapper[4721]: I1202 12:01:26.715194 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerDied","Data":"2b5a3e04a093afd734df76e21cc539ac2697b5e275c6cc77be5a62e443752e2f"}
Dec 02 12:01:26 crc kubenswrapper[4721]: I1202 12:01:26.715684 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerStarted","Data":"240b7cf6f3c5851e9a4d93bbfdd59e21dcf90d1e5f54d8148006085d773e5e7b"}
Dec 02 12:01:26 crc kubenswrapper[4721]: I1202 12:01:26.715704 4721 scope.go:117] "RemoveContainer" containerID="af0d455046abe7542ae5be55dc88c0a9b4a1f4a60770dc97714ed00134bf7dd9"
Dec 02 12:03:14 crc kubenswrapper[4721]: I1202 12:03:14.933760 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-gbxxd"]
Dec 02 12:03:14 crc kubenswrapper[4721]: I1202 12:03:14.939607 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovn-controller" containerID="cri-o://16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661" gracePeriod=30
Dec 02 12:03:14 crc kubenswrapper[4721]: I1202 12:03:14.939922 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="northd" containerID="cri-o://467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39" gracePeriod=30
Dec 02 12:03:14 crc kubenswrapper[4721]: I1202 12:03:14.940017 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476" gracePeriod=30
Dec 02 12:03:14 crc kubenswrapper[4721]: I1202 12:03:14.940035 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="kube-rbac-proxy-node" containerID="cri-o://4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487" gracePeriod=30
Dec 02 12:03:14 crc kubenswrapper[4721]: I1202 12:03:14.940047 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovn-acl-logging" containerID="cri-o://a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b" gracePeriod=30
Dec 02 12:03:14 crc kubenswrapper[4721]: I1202 12:03:14.940334 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="sbdb" containerID="cri-o://527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac" gracePeriod=30
Dec 02 12:03:14 crc kubenswrapper[4721]: I1202 12:03:14.940416 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="nbdb" containerID="cri-o://769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0" gracePeriod=30
Dec 02 12:03:14 crc kubenswrapper[4721]: I1202 12:03:14.969529 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller" containerID="cri-o://22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282" gracePeriod=30
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.232695 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovnkube-controller/3.log"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.234839 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovn-acl-logging/0.log"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.235229 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovn-controller/0.log"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.235609 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288101 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-8nhdp"]
Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.288286 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="kubecfg-setup"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288297 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="kubecfg-setup"
Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.288307 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovn-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288312 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovn-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.288322 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="northd"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288328 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="northd"
Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.288337 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288379 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.288387 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="nbdb"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288393 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="nbdb"
Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.288401 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bf185fe-0c7d-448a-bfdc-f97c66b04870" containerName="collect-profiles"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288406 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bf185fe-0c7d-448a-bfdc-f97c66b04870" containerName="collect-profiles"
Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.288417 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="sbdb"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288423 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="sbdb"
Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.288431 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288437 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.288443 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288449 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.288456 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="kube-rbac-proxy-node"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288462 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="kube-rbac-proxy-node"
Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.288470 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovn-acl-logging"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288476 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovn-acl-logging"
Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.288486 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="kube-rbac-proxy-ovn-metrics"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288493 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="kube-rbac-proxy-ovn-metrics"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288572 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="kube-rbac-proxy-ovn-metrics"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288582 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovn-acl-logging"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288591 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="northd"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288597 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="nbdb"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288603 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="kube-rbac-proxy-node"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288612 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288619 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bf185fe-0c7d-448a-bfdc-f97c66b04870" containerName="collect-profiles"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288627 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovn-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288634 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288641 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288649 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288657 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288663 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="sbdb"
Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.288737 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288743 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.288895 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.288902 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerName="ovnkube-controller"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.290270 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340212 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-ovn\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340248 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-node-log\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340265 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340300 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-env-overrides\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340317 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-cni-netd\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340338 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-var-lib-openvswitch\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340366 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-node-log" (OuterVolumeSpecName: "node-log") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340380 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-systemd\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340383 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340418 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340398 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340419 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-slash" (OuterVolumeSpecName: "host-slash") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340442 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340399 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-slash\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340515 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovnkube-script-lib\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340542 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovnkube-config\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340559 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-kubelet\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340575 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-run-netns\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340601 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovn-node-metrics-cert\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340616 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-etc-openvswitch\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340630 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-run-ovn-kubernetes\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340654 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-log-socket\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340661 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340673 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-systemd-units\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340700 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340707 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5c4m9\" (UniqueName: \"kubernetes.io/projected/e2a903a9-d920-48ed-8450-fa2a4fdb458b-kube-api-access-5c4m9\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340728 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-cni-bin\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340749 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-openvswitch\") pod \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\" (UID: \"e2a903a9-d920-48ed-8450-fa2a4fdb458b\") "
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340707 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340732 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-log-socket" (OuterVolumeSpecName: "log-socket") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340754 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340800 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340854 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340854 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340869 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.340919 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341042 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341133 4721 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovnkube-config\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341147 4721 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-run-netns\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341156 4721 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-kubelet\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341163 4721 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-etc-openvswitch\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341171 4721 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341185 4721 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-log-socket\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341192 4721 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-systemd-units\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341199 4721 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-cni-bin\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341207 4721 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-openvswitch\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341215 4721 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-ovn\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341226 4721 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341235 4721 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-node-log\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341243 4721 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-cni-netd\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341251 4721 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-env-overrides\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341259 4721 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-var-lib-openvswitch\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341268 4721 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-host-slash\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.341275 4721 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.346788 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2a903a9-d920-48ed-8450-fa2a4fdb458b-kube-api-access-5c4m9" (OuterVolumeSpecName: "kube-api-access-5c4m9") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "kube-api-access-5c4m9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.349564 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.352421 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "e2a903a9-d920-48ed-8450-fa2a4fdb458b" (UID: "e2a903a9-d920-48ed-8450-fa2a4fdb458b"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.385763 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovnkube-controller/3.log"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.387986 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovn-acl-logging/0.log"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.388449 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gbxxd_e2a903a9-d920-48ed-8450-fa2a4fdb458b/ovn-controller/0.log"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.388783 4721 generic.go:334] "Generic (PLEG): container finished" podID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerID="22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282" exitCode=0
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.388816 4721 generic.go:334] "Generic (PLEG): container finished" podID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerID="527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac" exitCode=0
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.388823 4721 generic.go:334] "Generic (PLEG): container finished" podID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerID="769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0" exitCode=0
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.388831 4721 generic.go:334] "Generic (PLEG): container finished" podID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerID="467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39" exitCode=0
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.388839 4721 generic.go:334] "Generic (PLEG): container finished" podID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerID="02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476" exitCode=0
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.388846 4721 generic.go:334] "Generic (PLEG): container finished" podID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerID="4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487" exitCode=0
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.388851 4721 generic.go:334] "Generic (PLEG): container finished" podID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerID="a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b" exitCode=143
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.388862 4721 generic.go:334] "Generic (PLEG): container finished" podID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" containerID="16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661" exitCode=143
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.388887 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerDied","Data":"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.388945 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerDied","Data":"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.388962 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerDied","Data":"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.388970 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.388976 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerDied","Data":"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.388993 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerDied","Data":"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389005 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerDied","Data":"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389011 4721 scope.go:117] "RemoveContainer" containerID="22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282"
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389019 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389034 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389041 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389047 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389055 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389062 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389069 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389075 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389082 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389092 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerDied","Data":"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389106 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389117 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389124 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389131 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389141 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389149 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389156 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389164 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389172 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389181 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389191 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerDied","Data":"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389202 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389211 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389218 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389225 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389232 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389239 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389246 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389253 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389259 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389266 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389275 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gbxxd" event={"ID":"e2a903a9-d920-48ed-8450-fa2a4fdb458b","Type":"ContainerDied","Data":"23a4985a9eba9c06f2df46284c711cba193c8454534555b65219ca8392293e7b"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389285 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389292 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389298 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac"}
Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389305 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container"
containerID={"Type":"cri-o","ID":"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0"} Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389311 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39"} Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389317 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476"} Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389323 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487"} Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389329 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b"} Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389335 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661"} Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.389362 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e"} Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.390261 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzz4m_3becf918-fa56-4f68-9963-b19fb91d4343/kube-multus/2.log" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.391380 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzz4m_3becf918-fa56-4f68-9963-b19fb91d4343/kube-multus/1.log" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.391428 4721 generic.go:334] "Generic (PLEG): container finished" podID="3becf918-fa56-4f68-9963-b19fb91d4343" containerID="9b74c183f5d95883599f7ec24d387308f5c3735a7602b3d28efbfc1e8a1e4f8a" exitCode=2 Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.391459 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzz4m" event={"ID":"3becf918-fa56-4f68-9963-b19fb91d4343","Type":"ContainerDied","Data":"9b74c183f5d95883599f7ec24d387308f5c3735a7602b3d28efbfc1e8a1e4f8a"} Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.391482 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4f55051ff5eedef4b085c57ab10b6545bc8d2abdc70c6e15444e4e0adcd5f659"} Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.391995 4721 scope.go:117] "RemoveContainer" containerID="9b74c183f5d95883599f7ec24d387308f5c3735a7602b3d28efbfc1e8a1e4f8a" Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.392191 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-jzz4m_openshift-multus(3becf918-fa56-4f68-9963-b19fb91d4343)\"" pod="openshift-multus/multus-jzz4m" podUID="3becf918-fa56-4f68-9963-b19fb91d4343" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.404548 4721 scope.go:117] "RemoveContainer" 
containerID="57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.423520 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-gbxxd"] Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.429874 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-gbxxd"] Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.431406 4721 scope.go:117] "RemoveContainer" containerID="527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442308 4721 scope.go:117] "RemoveContainer" containerID="769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442355 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-kubelet\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442409 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqgpx\" (UniqueName: \"kubernetes.io/projected/6e47be8d-225d-40eb-ab3c-e5b395193489-kube-api-access-hqgpx\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442438 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6e47be8d-225d-40eb-ab3c-e5b395193489-ovn-node-metrics-cert\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442462 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-run-ovn-kubernetes\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442490 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6e47be8d-225d-40eb-ab3c-e5b395193489-ovnkube-script-lib\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442509 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6e47be8d-225d-40eb-ab3c-e5b395193489-ovnkube-config\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442528 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6e47be8d-225d-40eb-ab3c-e5b395193489-env-overrides\") pod \"ovnkube-node-8nhdp\" (UID: 
\"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442554 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-run-openvswitch\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442574 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-node-log\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442707 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-etc-openvswitch\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442781 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-slash\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442810 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-var-lib-openvswitch\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442842 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-cni-bin\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442897 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442955 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-run-systemd\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.442982 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-run-ovn\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.443040 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-systemd-units\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.443102 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-cni-netd\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.443139 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-log-socket\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.443195 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-run-netns\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.443245 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5c4m9\" (UniqueName: \"kubernetes.io/projected/e2a903a9-d920-48ed-8450-fa2a4fdb458b-kube-api-access-5c4m9\") on node \"crc\" DevicePath \"\"" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.443261 4721 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e2a903a9-d920-48ed-8450-fa2a4fdb458b-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.443275 4721 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e2a903a9-d920-48ed-8450-fa2a4fdb458b-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.457882 4721 scope.go:117] "RemoveContainer" containerID="467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.469618 4721 scope.go:117] "RemoveContainer" containerID="02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.481464 4721 scope.go:117] "RemoveContainer" containerID="4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.497181 4721 scope.go:117] "RemoveContainer" containerID="a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.513805 4721 scope.go:117] "RemoveContainer" containerID="16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661" Dec 02 12:03:15 crc 
kubenswrapper[4721]: I1202 12:03:15.526189 4721 scope.go:117] "RemoveContainer" containerID="92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.536612 4721 scope.go:117] "RemoveContainer" containerID="22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282" Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.536961 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282\": container with ID starting with 22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282 not found: ID does not exist" containerID="22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.536999 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282"} err="failed to get container status \"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282\": rpc error: code = NotFound desc = could not find container \"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282\": container with ID starting with 22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.537022 4721 scope.go:117] "RemoveContainer" containerID="57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938" Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.537303 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\": container with ID starting with 57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938 not found: ID does not exist" containerID="57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.537333 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938"} err="failed to get container status \"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\": rpc error: code = NotFound desc = could not find container \"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\": container with ID starting with 57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.537368 4721 scope.go:117] "RemoveContainer" containerID="527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac" Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.537584 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\": container with ID starting with 527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac not found: ID does not exist" containerID="527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.537608 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac"} err="failed to get container status 
\"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\": rpc error: code = NotFound desc = could not find container \"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\": container with ID starting with 527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.537627 4721 scope.go:117] "RemoveContainer" containerID="769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0" Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.537884 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\": container with ID starting with 769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0 not found: ID does not exist" containerID="769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.537904 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0"} err="failed to get container status \"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\": rpc error: code = NotFound desc = could not find container \"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\": container with ID starting with 769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.537922 4721 scope.go:117] "RemoveContainer" containerID="467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39" Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.538152 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\": container with ID starting with 467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39 not found: ID does not exist" containerID="467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.538175 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39"} err="failed to get container status \"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\": rpc error: code = NotFound desc = could not find container \"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\": container with ID starting with 467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.538191 4721 scope.go:117] "RemoveContainer" containerID="02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476" Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.538443 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\": container with ID starting with 02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476 not found: ID does not exist" containerID="02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.538463 4721 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476"} err="failed to get container status \"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\": rpc error: code = NotFound desc = could not find container \"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\": container with ID starting with 02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.538478 4721 scope.go:117] "RemoveContainer" containerID="4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487" Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.538695 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\": container with ID starting with 4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487 not found: ID does not exist" containerID="4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.538714 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487"} err="failed to get container status \"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\": rpc error: code = NotFound desc = could not find container \"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\": container with ID starting with 4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.538729 4721 scope.go:117] "RemoveContainer" containerID="a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b" Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.538993 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\": container with ID starting with a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b not found: ID does not exist" containerID="a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.539018 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b"} err="failed to get container status \"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\": rpc error: code = NotFound desc = could not find container \"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\": container with ID starting with a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.539032 4721 scope.go:117] "RemoveContainer" containerID="16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661" Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.539327 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\": container with ID starting with 16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661 not found: ID does not exist" 
containerID="16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.539366 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661"} err="failed to get container status \"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\": rpc error: code = NotFound desc = could not find container \"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\": container with ID starting with 16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.539385 4721 scope.go:117] "RemoveContainer" containerID="92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e" Dec 02 12:03:15 crc kubenswrapper[4721]: E1202 12:03:15.539635 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\": container with ID starting with 92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e not found: ID does not exist" containerID="92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.539656 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e"} err="failed to get container status \"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\": rpc error: code = NotFound desc = could not find container \"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\": container with ID starting with 92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.539671 4721 scope.go:117] "RemoveContainer" containerID="22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.539934 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282"} err="failed to get container status \"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282\": rpc error: code = NotFound desc = could not find container \"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282\": container with ID starting with 22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.539996 4721 scope.go:117] "RemoveContainer" containerID="57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.540298 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938"} err="failed to get container status \"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\": rpc error: code = NotFound desc = could not find container \"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\": container with ID starting with 57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.540322 4721 scope.go:117] "RemoveContainer" 
containerID="527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.540588 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac"} err="failed to get container status \"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\": rpc error: code = NotFound desc = could not find container \"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\": container with ID starting with 527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.540622 4721 scope.go:117] "RemoveContainer" containerID="769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.540878 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0"} err="failed to get container status \"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\": rpc error: code = NotFound desc = could not find container \"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\": container with ID starting with 769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.540917 4721 scope.go:117] "RemoveContainer" containerID="467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.541139 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39"} err="failed to get container status \"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\": rpc error: code = NotFound desc = could not find container \"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\": container with ID starting with 467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.541163 4721 scope.go:117] "RemoveContainer" containerID="02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.541521 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476"} err="failed to get container status \"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\": rpc error: code = NotFound desc = could not find container \"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\": container with ID starting with 02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.541544 4721 scope.go:117] "RemoveContainer" containerID="4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.541750 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487"} err="failed to get container status \"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\": rpc error: code = NotFound desc = could not find 
container \"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\": container with ID starting with 4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.541774 4721 scope.go:117] "RemoveContainer" containerID="a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.541961 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b"} err="failed to get container status \"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\": rpc error: code = NotFound desc = could not find container \"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\": container with ID starting with a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.541984 4721 scope.go:117] "RemoveContainer" containerID="16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.542226 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661"} err="failed to get container status \"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\": rpc error: code = NotFound desc = could not find container \"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\": container with ID starting with 16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.542251 4721 scope.go:117] "RemoveContainer" containerID="92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.542502 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e"} err="failed to get container status \"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\": rpc error: code = NotFound desc = could not find container \"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\": container with ID starting with 92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.542543 4721 scope.go:117] "RemoveContainer" containerID="22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.542779 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282"} err="failed to get container status \"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282\": rpc error: code = NotFound desc = could not find container \"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282\": container with ID starting with 22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.542806 4721 scope.go:117] "RemoveContainer" containerID="57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.543360 4721 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938"} err="failed to get container status \"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\": rpc error: code = NotFound desc = could not find container \"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\": container with ID starting with 57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.543390 4721 scope.go:117] "RemoveContainer" containerID="527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.543618 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac"} err="failed to get container status \"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\": rpc error: code = NotFound desc = could not find container \"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\": container with ID starting with 527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.543647 4721 scope.go:117] "RemoveContainer" containerID="769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.543856 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-run-openvswitch\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.543887 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-node-log\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.543918 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-var-lib-openvswitch\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.543939 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-etc-openvswitch\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.543960 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-slash\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.543979 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" 
(UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-cni-bin\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544004 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-node-log\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544009 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-var-lib-openvswitch\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544009 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544030 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-slash\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544053 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-run-openvswitch\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544109 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-cni-bin\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544131 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544160 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-etc-openvswitch\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544174 4721 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0"} err="failed to get container status \"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\": rpc error: code = NotFound desc = could not find container \"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\": container with ID starting with 769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544193 4721 scope.go:117] "RemoveContainer" containerID="467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544227 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-run-systemd\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544263 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-run-ovn\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544311 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-systemd-units\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544316 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-run-systemd\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544330 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-run-ovn\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544382 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-cni-netd\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544387 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-systemd-units\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544401 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-log-socket\") pod \"ovnkube-node-8nhdp\" (UID: 
\"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544421 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-run-netns\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544448 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-kubelet\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544448 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39"} err="failed to get container status \"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\": rpc error: code = NotFound desc = could not find container \"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\": container with ID starting with 467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544468 4721 scope.go:117] "RemoveContainer" containerID="02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544472 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqgpx\" (UniqueName: \"kubernetes.io/projected/6e47be8d-225d-40eb-ab3c-e5b395193489-kube-api-access-hqgpx\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544499 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6e47be8d-225d-40eb-ab3c-e5b395193489-ovn-node-metrics-cert\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544514 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-run-ovn-kubernetes\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544535 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6e47be8d-225d-40eb-ab3c-e5b395193489-ovnkube-script-lib\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544551 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6e47be8d-225d-40eb-ab3c-e5b395193489-ovnkube-config\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544566 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6e47be8d-225d-40eb-ab3c-e5b395193489-env-overrides\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544613 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-log-socket\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544641 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-cni-netd\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544687 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476"} err="failed to get container status \"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\": rpc error: code = NotFound desc = could not find container \"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\": container with ID starting with 02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544702 4721 scope.go:117] "RemoveContainer" containerID="4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544758 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-run-netns\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544938 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487"} err="failed to get container status \"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\": rpc error: code = NotFound desc = could not find container \"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\": container with ID starting with 4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.544958 4721 scope.go:117] "RemoveContainer" containerID="a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.545028 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-kubelet\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.545036 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6e47be8d-225d-40eb-ab3c-e5b395193489-env-overrides\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.545072 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6e47be8d-225d-40eb-ab3c-e5b395193489-host-run-ovn-kubernetes\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.545237 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b"} err="failed to get container status \"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\": rpc error: code = NotFound desc = could not find container \"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\": container with ID starting with a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.545256 4721 scope.go:117] "RemoveContainer" containerID="16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.545584 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6e47be8d-225d-40eb-ab3c-e5b395193489-ovnkube-script-lib\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.545734 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6e47be8d-225d-40eb-ab3c-e5b395193489-ovnkube-config\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.545926 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661"} err="failed to get container status \"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\": rpc error: code = NotFound desc = could not find container \"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\": container with ID starting with 16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.547468 4721 scope.go:117] "RemoveContainer" containerID="92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.548565 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6e47be8d-225d-40eb-ab3c-e5b395193489-ovn-node-metrics-cert\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.551622 4721 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e"} err="failed to get container status \"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\": rpc error: code = NotFound desc = could not find container \"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\": container with ID starting with 92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.551646 4721 scope.go:117] "RemoveContainer" containerID="22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.551870 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282"} err="failed to get container status \"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282\": rpc error: code = NotFound desc = could not find container \"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282\": container with ID starting with 22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.551894 4721 scope.go:117] "RemoveContainer" containerID="57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.552112 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938"} err="failed to get container status \"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\": rpc error: code = NotFound desc = could not find container \"57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938\": container with ID starting with 57ebf9b0d16cf70d813ae6cd8bfc7f9d91e5dc27f7037db583c00b796825e938 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.552132 4721 scope.go:117] "RemoveContainer" containerID="527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.552381 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac"} err="failed to get container status \"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\": rpc error: code = NotFound desc = could not find container \"527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac\": container with ID starting with 527b73deeace403251a173a6413dc130b846cc858183ac4eb13c26b043072cac not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.552404 4721 scope.go:117] "RemoveContainer" containerID="769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.552641 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0"} err="failed to get container status \"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\": rpc error: code = NotFound desc = could not find container \"769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0\": container with ID starting with 769f721a258096f09e1b4a4dbcbbad323413535e86b040047d9cbab893f4e6e0 not found: ID does not exist" Dec 
02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.552662 4721 scope.go:117] "RemoveContainer" containerID="467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.552855 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39"} err="failed to get container status \"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\": rpc error: code = NotFound desc = could not find container \"467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39\": container with ID starting with 467b888bbb93276800e777510789315d5d2e3633b4885eb37c3d532c69dc5b39 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.552870 4721 scope.go:117] "RemoveContainer" containerID="02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.553139 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476"} err="failed to get container status \"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\": rpc error: code = NotFound desc = could not find container \"02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476\": container with ID starting with 02990b8ad38ef18a2f037eda09858b94afd6ee35550a3d65870624041c977476 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.553167 4721 scope.go:117] "RemoveContainer" containerID="4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.553481 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487"} err="failed to get container status \"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\": rpc error: code = NotFound desc = could not find container \"4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487\": container with ID starting with 4ab6a1f255a78e43a93452aaff3c6cae9b47eacf0ff1fb2c65f61748994b2487 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.553502 4721 scope.go:117] "RemoveContainer" containerID="a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.553738 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b"} err="failed to get container status \"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\": rpc error: code = NotFound desc = could not find container \"a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b\": container with ID starting with a4833d2b970db4f756d2e8fec313752ef21b6595689a365b34ef4e44bd548d4b not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.553756 4721 scope.go:117] "RemoveContainer" containerID="16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.554004 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661"} err="failed to get container status 
\"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\": rpc error: code = NotFound desc = could not find container \"16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661\": container with ID starting with 16ef26958381fcc050108a8e5d04a972929f66cca72e09a23901300d77019661 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.554025 4721 scope.go:117] "RemoveContainer" containerID="92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.554265 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e"} err="failed to get container status \"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\": rpc error: code = NotFound desc = could not find container \"92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e\": container with ID starting with 92895075fd5c84ff48683a07392fd326e11273aece445ec08e7dfa1fd800215e not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.554291 4721 scope.go:117] "RemoveContainer" containerID="22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.554566 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282"} err="failed to get container status \"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282\": rpc error: code = NotFound desc = could not find container \"22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282\": container with ID starting with 22302d4930d93af9acb0af54070817e6c61e1c618f39d72049cc2aa68cecc282 not found: ID does not exist" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.565791 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqgpx\" (UniqueName: \"kubernetes.io/projected/6e47be8d-225d-40eb-ab3c-e5b395193489-kube-api-access-hqgpx\") pod \"ovnkube-node-8nhdp\" (UID: \"6e47be8d-225d-40eb-ab3c-e5b395193489\") " pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:15 crc kubenswrapper[4721]: I1202 12:03:15.621882 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:16 crc kubenswrapper[4721]: I1202 12:03:16.399319 4721 generic.go:334] "Generic (PLEG): container finished" podID="6e47be8d-225d-40eb-ab3c-e5b395193489" containerID="6833ab74658ae950684c67fff27b5125d67c76a3a14a0ef28ab08ac8080df27e" exitCode=0 Dec 02 12:03:16 crc kubenswrapper[4721]: I1202 12:03:16.399429 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" event={"ID":"6e47be8d-225d-40eb-ab3c-e5b395193489","Type":"ContainerDied","Data":"6833ab74658ae950684c67fff27b5125d67c76a3a14a0ef28ab08ac8080df27e"} Dec 02 12:03:16 crc kubenswrapper[4721]: I1202 12:03:16.399507 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" event={"ID":"6e47be8d-225d-40eb-ab3c-e5b395193489","Type":"ContainerStarted","Data":"90e7ce0203cb2f7dc9fe09791ae4b3fcb3ed1a19541b904b7d10d1399e46ee53"} Dec 02 12:03:16 crc kubenswrapper[4721]: I1202 12:03:16.854983 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2a903a9-d920-48ed-8450-fa2a4fdb458b" path="/var/lib/kubelet/pods/e2a903a9-d920-48ed-8450-fa2a4fdb458b/volumes" Dec 02 12:03:17 crc kubenswrapper[4721]: I1202 12:03:17.413162 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" event={"ID":"6e47be8d-225d-40eb-ab3c-e5b395193489","Type":"ContainerStarted","Data":"c773f34e55b4c2ec9f5c532ca496d0700a47d030aafb35b949d808addc71c145"} Dec 02 12:03:17 crc kubenswrapper[4721]: I1202 12:03:17.413206 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" event={"ID":"6e47be8d-225d-40eb-ab3c-e5b395193489","Type":"ContainerStarted","Data":"e265ae2d771a1139c1834fbd2f4449f6f172b785511dc12e19e686b3dfdf59f5"} Dec 02 12:03:17 crc kubenswrapper[4721]: I1202 12:03:17.413217 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" event={"ID":"6e47be8d-225d-40eb-ab3c-e5b395193489","Type":"ContainerStarted","Data":"aaa85af8cd995c0c8c4e88ff215a6f8093ee4e024350e549ce53f5ebb87e1f10"} Dec 02 12:03:17 crc kubenswrapper[4721]: I1202 12:03:17.413228 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" event={"ID":"6e47be8d-225d-40eb-ab3c-e5b395193489","Type":"ContainerStarted","Data":"000009b6f04d4af0a694aac48407ae476058e65996f38fce81a78de257cb73bd"} Dec 02 12:03:17 crc kubenswrapper[4721]: I1202 12:03:17.413240 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" event={"ID":"6e47be8d-225d-40eb-ab3c-e5b395193489","Type":"ContainerStarted","Data":"551adda46749ac81a1c9b62e307f4a058af6d037690520cdf313299c66eb8218"} Dec 02 12:03:17 crc kubenswrapper[4721]: I1202 12:03:17.413248 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" event={"ID":"6e47be8d-225d-40eb-ab3c-e5b395193489","Type":"ContainerStarted","Data":"fb29470da13e76f604ee52f68d77cef5139b5d580c96dd55bf15d60318470e9b"} Dec 02 12:03:20 crc kubenswrapper[4721]: I1202 12:03:20.431159 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" event={"ID":"6e47be8d-225d-40eb-ab3c-e5b395193489","Type":"ContainerStarted","Data":"a5947de710c58178dad4529c6df3525ca0fdfef890c4828db010da866664ebb8"} Dec 02 12:03:22 crc kubenswrapper[4721]: I1202 12:03:22.444159 4721 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" event={"ID":"6e47be8d-225d-40eb-ab3c-e5b395193489","Type":"ContainerStarted","Data":"a854f286176a69eea83ba6ff16d37e1f567d4744fa988ed18aa01b10f13bd16b"} Dec 02 12:03:22 crc kubenswrapper[4721]: I1202 12:03:22.444718 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:22 crc kubenswrapper[4721]: I1202 12:03:22.444735 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:22 crc kubenswrapper[4721]: I1202 12:03:22.444745 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:22 crc kubenswrapper[4721]: I1202 12:03:22.469656 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:22 crc kubenswrapper[4721]: I1202 12:03:22.471465 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:22 crc kubenswrapper[4721]: I1202 12:03:22.474692 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" podStartSLOduration=7.474680449 podStartE2EDuration="7.474680449s" podCreationTimestamp="2025-12-02 12:03:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:03:22.473250605 +0000 UTC m=+732.211800954" watchObservedRunningTime="2025-12-02 12:03:22.474680449 +0000 UTC m=+732.213230798" Dec 02 12:03:28 crc kubenswrapper[4721]: I1202 12:03:28.841203 4721 scope.go:117] "RemoveContainer" containerID="9b74c183f5d95883599f7ec24d387308f5c3735a7602b3d28efbfc1e8a1e4f8a" Dec 02 12:03:29 crc kubenswrapper[4721]: I1202 12:03:29.485820 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzz4m_3becf918-fa56-4f68-9963-b19fb91d4343/kube-multus/2.log" Dec 02 12:03:29 crc kubenswrapper[4721]: I1202 12:03:29.487004 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzz4m_3becf918-fa56-4f68-9963-b19fb91d4343/kube-multus/1.log" Dec 02 12:03:29 crc kubenswrapper[4721]: I1202 12:03:29.487071 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jzz4m" event={"ID":"3becf918-fa56-4f68-9963-b19fb91d4343","Type":"ContainerStarted","Data":"168a73951e64df94761f2faee492090f6f3a9e848c8f4baa5465117e68e1edd2"} Dec 02 12:03:41 crc kubenswrapper[4721]: I1202 12:03:41.203587 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl"] Dec 02 12:03:41 crc kubenswrapper[4721]: I1202 12:03:41.205090 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" Dec 02 12:03:41 crc kubenswrapper[4721]: I1202 12:03:41.208747 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 02 12:03:41 crc kubenswrapper[4721]: I1202 12:03:41.215862 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl"] Dec 02 12:03:41 crc kubenswrapper[4721]: I1202 12:03:41.372322 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ksgl\" (UniqueName: \"kubernetes.io/projected/dc649529-91a2-4d31-9c9c-1ad1523d7182-kube-api-access-9ksgl\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl\" (UID: \"dc649529-91a2-4d31-9c9c-1ad1523d7182\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" Dec 02 12:03:41 crc kubenswrapper[4721]: I1202 12:03:41.372455 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dc649529-91a2-4d31-9c9c-1ad1523d7182-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl\" (UID: \"dc649529-91a2-4d31-9c9c-1ad1523d7182\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" Dec 02 12:03:41 crc kubenswrapper[4721]: I1202 12:03:41.372493 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dc649529-91a2-4d31-9c9c-1ad1523d7182-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl\" (UID: \"dc649529-91a2-4d31-9c9c-1ad1523d7182\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" Dec 02 12:03:41 crc kubenswrapper[4721]: I1202 12:03:41.473867 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dc649529-91a2-4d31-9c9c-1ad1523d7182-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl\" (UID: \"dc649529-91a2-4d31-9c9c-1ad1523d7182\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" Dec 02 12:03:41 crc kubenswrapper[4721]: I1202 12:03:41.473924 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dc649529-91a2-4d31-9c9c-1ad1523d7182-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl\" (UID: \"dc649529-91a2-4d31-9c9c-1ad1523d7182\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" Dec 02 12:03:41 crc kubenswrapper[4721]: I1202 12:03:41.474042 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ksgl\" (UniqueName: \"kubernetes.io/projected/dc649529-91a2-4d31-9c9c-1ad1523d7182-kube-api-access-9ksgl\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl\" (UID: \"dc649529-91a2-4d31-9c9c-1ad1523d7182\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" Dec 02 12:03:41 crc kubenswrapper[4721]: I1202 12:03:41.474494 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/dc649529-91a2-4d31-9c9c-1ad1523d7182-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl\" (UID: \"dc649529-91a2-4d31-9c9c-1ad1523d7182\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" Dec 02 12:03:41 crc kubenswrapper[4721]: I1202 12:03:41.474723 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dc649529-91a2-4d31-9c9c-1ad1523d7182-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl\" (UID: \"dc649529-91a2-4d31-9c9c-1ad1523d7182\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" Dec 02 12:03:41 crc kubenswrapper[4721]: I1202 12:03:41.495974 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ksgl\" (UniqueName: \"kubernetes.io/projected/dc649529-91a2-4d31-9c9c-1ad1523d7182-kube-api-access-9ksgl\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl\" (UID: \"dc649529-91a2-4d31-9c9c-1ad1523d7182\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" Dec 02 12:03:41 crc kubenswrapper[4721]: I1202 12:03:41.530433 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" Dec 02 12:03:41 crc kubenswrapper[4721]: I1202 12:03:41.772422 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl"] Dec 02 12:03:42 crc kubenswrapper[4721]: I1202 12:03:42.574581 4721 generic.go:334] "Generic (PLEG): container finished" podID="dc649529-91a2-4d31-9c9c-1ad1523d7182" containerID="fa392620b13347c2a0a9cf393caaf1c075734759eefcd943399786adca695446" exitCode=0 Dec 02 12:03:42 crc kubenswrapper[4721]: I1202 12:03:42.574696 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" event={"ID":"dc649529-91a2-4d31-9c9c-1ad1523d7182","Type":"ContainerDied","Data":"fa392620b13347c2a0a9cf393caaf1c075734759eefcd943399786adca695446"} Dec 02 12:03:42 crc kubenswrapper[4721]: I1202 12:03:42.574907 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" event={"ID":"dc649529-91a2-4d31-9c9c-1ad1523d7182","Type":"ContainerStarted","Data":"0d768e79598fb13d1f392fc4750c59a57288cec4a0f459b6a8561fa45df71513"} Dec 02 12:03:42 crc kubenswrapper[4721]: I1202 12:03:42.578103 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 12:03:44 crc kubenswrapper[4721]: I1202 12:03:44.586125 4721 generic.go:334] "Generic (PLEG): container finished" podID="dc649529-91a2-4d31-9c9c-1ad1523d7182" containerID="837a503b97771d87efcee2e55697e640a7d3ed409e127ab794ea6a7c943de897" exitCode=0 Dec 02 12:03:44 crc kubenswrapper[4721]: I1202 12:03:44.586203 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" event={"ID":"dc649529-91a2-4d31-9c9c-1ad1523d7182","Type":"ContainerDied","Data":"837a503b97771d87efcee2e55697e640a7d3ed409e127ab794ea6a7c943de897"} Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.553832 4721 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-operators-ktss9"] Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.554744 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.562569 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ktss9"] Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.615817 4721 generic.go:334] "Generic (PLEG): container finished" podID="dc649529-91a2-4d31-9c9c-1ad1523d7182" containerID="9d7cac6fd1c68475de8480e1afc511f61169d62171a7a64c69969409c3478f58" exitCode=0 Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.615859 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" event={"ID":"dc649529-91a2-4d31-9c9c-1ad1523d7182","Type":"ContainerDied","Data":"9d7cac6fd1c68475de8480e1afc511f61169d62171a7a64c69969409c3478f58"} Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.624850 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/664a2f9f-e6d5-40c5-a599-9905670fc63e-catalog-content\") pod \"redhat-operators-ktss9\" (UID: \"664a2f9f-e6d5-40c5-a599-9905670fc63e\") " pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.624917 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/664a2f9f-e6d5-40c5-a599-9905670fc63e-utilities\") pod \"redhat-operators-ktss9\" (UID: \"664a2f9f-e6d5-40c5-a599-9905670fc63e\") " pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.624958 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw9kv\" (UniqueName: \"kubernetes.io/projected/664a2f9f-e6d5-40c5-a599-9905670fc63e-kube-api-access-tw9kv\") pod \"redhat-operators-ktss9\" (UID: \"664a2f9f-e6d5-40c5-a599-9905670fc63e\") " pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.651476 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8nhdp" Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.725842 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/664a2f9f-e6d5-40c5-a599-9905670fc63e-catalog-content\") pod \"redhat-operators-ktss9\" (UID: \"664a2f9f-e6d5-40c5-a599-9905670fc63e\") " pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.725909 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/664a2f9f-e6d5-40c5-a599-9905670fc63e-utilities\") pod \"redhat-operators-ktss9\" (UID: \"664a2f9f-e6d5-40c5-a599-9905670fc63e\") " pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.725959 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw9kv\" (UniqueName: \"kubernetes.io/projected/664a2f9f-e6d5-40c5-a599-9905670fc63e-kube-api-access-tw9kv\") pod \"redhat-operators-ktss9\" (UID: 
\"664a2f9f-e6d5-40c5-a599-9905670fc63e\") " pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.726500 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/664a2f9f-e6d5-40c5-a599-9905670fc63e-catalog-content\") pod \"redhat-operators-ktss9\" (UID: \"664a2f9f-e6d5-40c5-a599-9905670fc63e\") " pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.726906 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/664a2f9f-e6d5-40c5-a599-9905670fc63e-utilities\") pod \"redhat-operators-ktss9\" (UID: \"664a2f9f-e6d5-40c5-a599-9905670fc63e\") " pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.748015 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw9kv\" (UniqueName: \"kubernetes.io/projected/664a2f9f-e6d5-40c5-a599-9905670fc63e-kube-api-access-tw9kv\") pod \"redhat-operators-ktss9\" (UID: \"664a2f9f-e6d5-40c5-a599-9905670fc63e\") " pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:03:45 crc kubenswrapper[4721]: I1202 12:03:45.917235 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:03:46 crc kubenswrapper[4721]: I1202 12:03:46.295749 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ktss9"] Dec 02 12:03:46 crc kubenswrapper[4721]: I1202 12:03:46.621457 4721 generic.go:334] "Generic (PLEG): container finished" podID="664a2f9f-e6d5-40c5-a599-9905670fc63e" containerID="c36a3a19779b394642dee8275db7d78e52c420510f3a69ff202ffdc30938910a" exitCode=0 Dec 02 12:03:46 crc kubenswrapper[4721]: I1202 12:03:46.621564 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ktss9" event={"ID":"664a2f9f-e6d5-40c5-a599-9905670fc63e","Type":"ContainerDied","Data":"c36a3a19779b394642dee8275db7d78e52c420510f3a69ff202ffdc30938910a"} Dec 02 12:03:46 crc kubenswrapper[4721]: I1202 12:03:46.621612 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ktss9" event={"ID":"664a2f9f-e6d5-40c5-a599-9905670fc63e","Type":"ContainerStarted","Data":"5bf0d67db26816a6f4100b97247cc7bdb62e272ac5a5ecb7946966c8bbf6cd87"} Dec 02 12:03:46 crc kubenswrapper[4721]: I1202 12:03:46.891644 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" Dec 02 12:03:47 crc kubenswrapper[4721]: I1202 12:03:47.046397 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dc649529-91a2-4d31-9c9c-1ad1523d7182-util\") pod \"dc649529-91a2-4d31-9c9c-1ad1523d7182\" (UID: \"dc649529-91a2-4d31-9c9c-1ad1523d7182\") " Dec 02 12:03:47 crc kubenswrapper[4721]: I1202 12:03:47.046454 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ksgl\" (UniqueName: \"kubernetes.io/projected/dc649529-91a2-4d31-9c9c-1ad1523d7182-kube-api-access-9ksgl\") pod \"dc649529-91a2-4d31-9c9c-1ad1523d7182\" (UID: \"dc649529-91a2-4d31-9c9c-1ad1523d7182\") " Dec 02 12:03:47 crc kubenswrapper[4721]: I1202 12:03:47.046477 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dc649529-91a2-4d31-9c9c-1ad1523d7182-bundle\") pod \"dc649529-91a2-4d31-9c9c-1ad1523d7182\" (UID: \"dc649529-91a2-4d31-9c9c-1ad1523d7182\") " Dec 02 12:03:47 crc kubenswrapper[4721]: I1202 12:03:47.047572 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc649529-91a2-4d31-9c9c-1ad1523d7182-bundle" (OuterVolumeSpecName: "bundle") pod "dc649529-91a2-4d31-9c9c-1ad1523d7182" (UID: "dc649529-91a2-4d31-9c9c-1ad1523d7182"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:03:47 crc kubenswrapper[4721]: I1202 12:03:47.050371 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc649529-91a2-4d31-9c9c-1ad1523d7182-kube-api-access-9ksgl" (OuterVolumeSpecName: "kube-api-access-9ksgl") pod "dc649529-91a2-4d31-9c9c-1ad1523d7182" (UID: "dc649529-91a2-4d31-9c9c-1ad1523d7182"). InnerVolumeSpecName "kube-api-access-9ksgl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:03:47 crc kubenswrapper[4721]: I1202 12:03:47.147849 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ksgl\" (UniqueName: \"kubernetes.io/projected/dc649529-91a2-4d31-9c9c-1ad1523d7182-kube-api-access-9ksgl\") on node \"crc\" DevicePath \"\"" Dec 02 12:03:47 crc kubenswrapper[4721]: I1202 12:03:47.148148 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dc649529-91a2-4d31-9c9c-1ad1523d7182-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 12:03:47 crc kubenswrapper[4721]: I1202 12:03:47.331972 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc649529-91a2-4d31-9c9c-1ad1523d7182-util" (OuterVolumeSpecName: "util") pod "dc649529-91a2-4d31-9c9c-1ad1523d7182" (UID: "dc649529-91a2-4d31-9c9c-1ad1523d7182"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:03:47 crc kubenswrapper[4721]: I1202 12:03:47.349857 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dc649529-91a2-4d31-9c9c-1ad1523d7182-util\") on node \"crc\" DevicePath \"\"" Dec 02 12:03:47 crc kubenswrapper[4721]: I1202 12:03:47.632171 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" event={"ID":"dc649529-91a2-4d31-9c9c-1ad1523d7182","Type":"ContainerDied","Data":"0d768e79598fb13d1f392fc4750c59a57288cec4a0f459b6a8561fa45df71513"} Dec 02 12:03:47 crc kubenswrapper[4721]: I1202 12:03:47.632210 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d768e79598fb13d1f392fc4750c59a57288cec4a0f459b6a8561fa45df71513" Dec 02 12:03:47 crc kubenswrapper[4721]: I1202 12:03:47.632270 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl" Dec 02 12:03:47 crc kubenswrapper[4721]: I1202 12:03:47.960641 4721 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 02 12:03:48 crc kubenswrapper[4721]: I1202 12:03:48.637607 4721 generic.go:334] "Generic (PLEG): container finished" podID="664a2f9f-e6d5-40c5-a599-9905670fc63e" containerID="5b8932d3dfc78f8718bbf9b7f7ec7abf54b17f6d5517a9a611b28921e9e37212" exitCode=0 Dec 02 12:03:48 crc kubenswrapper[4721]: I1202 12:03:48.637671 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ktss9" event={"ID":"664a2f9f-e6d5-40c5-a599-9905670fc63e","Type":"ContainerDied","Data":"5b8932d3dfc78f8718bbf9b7f7ec7abf54b17f6d5517a9a611b28921e9e37212"} Dec 02 12:03:49 crc kubenswrapper[4721]: I1202 12:03:49.644506 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ktss9" event={"ID":"664a2f9f-e6d5-40c5-a599-9905670fc63e","Type":"ContainerStarted","Data":"649dfefe41354696ee96643225ab6fddd2817f807694fb8a001555651a489a51"} Dec 02 12:03:49 crc kubenswrapper[4721]: I1202 12:03:49.670541 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ktss9" podStartSLOduration=2.232492696 podStartE2EDuration="4.670525281s" podCreationTimestamp="2025-12-02 12:03:45 +0000 UTC" firstStartedPulling="2025-12-02 12:03:46.622772705 +0000 UTC m=+756.361323054" lastFinishedPulling="2025-12-02 12:03:49.06080529 +0000 UTC m=+758.799355639" observedRunningTime="2025-12-02 12:03:49.666800676 +0000 UTC m=+759.405351035" watchObservedRunningTime="2025-12-02 12:03:49.670525281 +0000 UTC m=+759.409075630" Dec 02 12:03:55 crc kubenswrapper[4721]: I1202 12:03:55.702743 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 12:03:55 crc kubenswrapper[4721]: I1202 12:03:55.703468 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" Dec 02 12:03:55 crc kubenswrapper[4721]: I1202 12:03:55.917662 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:03:55 crc kubenswrapper[4721]: I1202 12:03:55.917716 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:03:55 crc kubenswrapper[4721]: I1202 12:03:55.954453 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:03:56 crc kubenswrapper[4721]: I1202 12:03:56.711136 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.148705 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ktss9"] Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.596609 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs"] Dec 02 12:03:58 crc kubenswrapper[4721]: E1202 12:03:58.596800 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc649529-91a2-4d31-9c9c-1ad1523d7182" containerName="util" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.596810 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc649529-91a2-4d31-9c9c-1ad1523d7182" containerName="util" Dec 02 12:03:58 crc kubenswrapper[4721]: E1202 12:03:58.596825 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc649529-91a2-4d31-9c9c-1ad1523d7182" containerName="extract" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.596831 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc649529-91a2-4d31-9c9c-1ad1523d7182" containerName="extract" Dec 02 12:03:58 crc kubenswrapper[4721]: E1202 12:03:58.596839 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc649529-91a2-4d31-9c9c-1ad1523d7182" containerName="pull" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.596845 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc649529-91a2-4d31-9c9c-1ad1523d7182" containerName="pull" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.596935 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc649529-91a2-4d31-9c9c-1ad1523d7182" containerName="extract" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.597311 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.599274 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.599305 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.599420 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.600469 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.600514 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-g79s6" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.612229 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs"] Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.679406 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c7571582-1d03-4d8d-92f0-0ec120163508-webhook-cert\") pod \"metallb-operator-controller-manager-86c7ccf9f9-b95bs\" (UID: \"c7571582-1d03-4d8d-92f0-0ec120163508\") " pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.680433 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c7571582-1d03-4d8d-92f0-0ec120163508-apiservice-cert\") pod \"metallb-operator-controller-manager-86c7ccf9f9-b95bs\" (UID: \"c7571582-1d03-4d8d-92f0-0ec120163508\") " pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.680473 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsvtm\" (UniqueName: \"kubernetes.io/projected/c7571582-1d03-4d8d-92f0-0ec120163508-kube-api-access-zsvtm\") pod \"metallb-operator-controller-manager-86c7ccf9f9-b95bs\" (UID: \"c7571582-1d03-4d8d-92f0-0ec120163508\") " pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.681542 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ktss9" podUID="664a2f9f-e6d5-40c5-a599-9905670fc63e" containerName="registry-server" containerID="cri-o://649dfefe41354696ee96643225ab6fddd2817f807694fb8a001555651a489a51" gracePeriod=2 Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.781642 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c7571582-1d03-4d8d-92f0-0ec120163508-webhook-cert\") pod \"metallb-operator-controller-manager-86c7ccf9f9-b95bs\" (UID: \"c7571582-1d03-4d8d-92f0-0ec120163508\") " pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.781749 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c7571582-1d03-4d8d-92f0-0ec120163508-apiservice-cert\") pod \"metallb-operator-controller-manager-86c7ccf9f9-b95bs\" (UID: \"c7571582-1d03-4d8d-92f0-0ec120163508\") " pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.781821 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsvtm\" (UniqueName: \"kubernetes.io/projected/c7571582-1d03-4d8d-92f0-0ec120163508-kube-api-access-zsvtm\") pod \"metallb-operator-controller-manager-86c7ccf9f9-b95bs\" (UID: \"c7571582-1d03-4d8d-92f0-0ec120163508\") " pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.787852 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c7571582-1d03-4d8d-92f0-0ec120163508-webhook-cert\") pod \"metallb-operator-controller-manager-86c7ccf9f9-b95bs\" (UID: \"c7571582-1d03-4d8d-92f0-0ec120163508\") " pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.791012 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c7571582-1d03-4d8d-92f0-0ec120163508-apiservice-cert\") pod \"metallb-operator-controller-manager-86c7ccf9f9-b95bs\" (UID: \"c7571582-1d03-4d8d-92f0-0ec120163508\") " pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.799268 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsvtm\" (UniqueName: \"kubernetes.io/projected/c7571582-1d03-4d8d-92f0-0ec120163508-kube-api-access-zsvtm\") pod \"metallb-operator-controller-manager-86c7ccf9f9-b95bs\" (UID: \"c7571582-1d03-4d8d-92f0-0ec120163508\") " pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.863654 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v"] Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.870097 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.875840 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.875841 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-rhsmd" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.877979 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.884154 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v"] Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.911181 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.983994 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l77pk\" (UniqueName: \"kubernetes.io/projected/1e7d973f-0100-4daa-b4ca-f6f8f9b90a22-kube-api-access-l77pk\") pod \"metallb-operator-webhook-server-5c48b86c94-n454v\" (UID: \"1e7d973f-0100-4daa-b4ca-f6f8f9b90a22\") " pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.984047 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1e7d973f-0100-4daa-b4ca-f6f8f9b90a22-webhook-cert\") pod \"metallb-operator-webhook-server-5c48b86c94-n454v\" (UID: \"1e7d973f-0100-4daa-b4ca-f6f8f9b90a22\") " pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" Dec 02 12:03:58 crc kubenswrapper[4721]: I1202 12:03:58.984132 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1e7d973f-0100-4daa-b4ca-f6f8f9b90a22-apiservice-cert\") pod \"metallb-operator-webhook-server-5c48b86c94-n454v\" (UID: \"1e7d973f-0100-4daa-b4ca-f6f8f9b90a22\") " pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" Dec 02 12:03:59 crc kubenswrapper[4721]: I1202 12:03:59.085332 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l77pk\" (UniqueName: \"kubernetes.io/projected/1e7d973f-0100-4daa-b4ca-f6f8f9b90a22-kube-api-access-l77pk\") pod \"metallb-operator-webhook-server-5c48b86c94-n454v\" (UID: \"1e7d973f-0100-4daa-b4ca-f6f8f9b90a22\") " pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" Dec 02 12:03:59 crc kubenswrapper[4721]: I1202 12:03:59.085381 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1e7d973f-0100-4daa-b4ca-f6f8f9b90a22-webhook-cert\") pod \"metallb-operator-webhook-server-5c48b86c94-n454v\" (UID: \"1e7d973f-0100-4daa-b4ca-f6f8f9b90a22\") " pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" Dec 02 12:03:59 crc kubenswrapper[4721]: I1202 12:03:59.085437 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1e7d973f-0100-4daa-b4ca-f6f8f9b90a22-apiservice-cert\") pod \"metallb-operator-webhook-server-5c48b86c94-n454v\" (UID: \"1e7d973f-0100-4daa-b4ca-f6f8f9b90a22\") " pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" Dec 02 12:03:59 crc kubenswrapper[4721]: I1202 12:03:59.092207 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1e7d973f-0100-4daa-b4ca-f6f8f9b90a22-apiservice-cert\") pod \"metallb-operator-webhook-server-5c48b86c94-n454v\" (UID: \"1e7d973f-0100-4daa-b4ca-f6f8f9b90a22\") " pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" Dec 02 12:03:59 crc kubenswrapper[4721]: I1202 12:03:59.093056 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1e7d973f-0100-4daa-b4ca-f6f8f9b90a22-webhook-cert\") pod \"metallb-operator-webhook-server-5c48b86c94-n454v\" (UID: \"1e7d973f-0100-4daa-b4ca-f6f8f9b90a22\") " 
pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" Dec 02 12:03:59 crc kubenswrapper[4721]: I1202 12:03:59.101453 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l77pk\" (UniqueName: \"kubernetes.io/projected/1e7d973f-0100-4daa-b4ca-f6f8f9b90a22-kube-api-access-l77pk\") pod \"metallb-operator-webhook-server-5c48b86c94-n454v\" (UID: \"1e7d973f-0100-4daa-b4ca-f6f8f9b90a22\") " pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" Dec 02 12:03:59 crc kubenswrapper[4721]: I1202 12:03:59.192090 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" Dec 02 12:03:59 crc kubenswrapper[4721]: I1202 12:03:59.233705 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs"] Dec 02 12:03:59 crc kubenswrapper[4721]: I1202 12:03:59.443791 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v"] Dec 02 12:03:59 crc kubenswrapper[4721]: W1202 12:03:59.447602 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e7d973f_0100_4daa_b4ca_f6f8f9b90a22.slice/crio-0820a94c7875385b399a04a66c3e5bceefb60853214d4a054c66f6da57068076 WatchSource:0}: Error finding container 0820a94c7875385b399a04a66c3e5bceefb60853214d4a054c66f6da57068076: Status 404 returned error can't find the container with id 0820a94c7875385b399a04a66c3e5bceefb60853214d4a054c66f6da57068076 Dec 02 12:03:59 crc kubenswrapper[4721]: I1202 12:03:59.686876 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" event={"ID":"1e7d973f-0100-4daa-b4ca-f6f8f9b90a22","Type":"ContainerStarted","Data":"0820a94c7875385b399a04a66c3e5bceefb60853214d4a054c66f6da57068076"} Dec 02 12:03:59 crc kubenswrapper[4721]: I1202 12:03:59.687998 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" event={"ID":"c7571582-1d03-4d8d-92f0-0ec120163508","Type":"ContainerStarted","Data":"fa22785fec92d0572486ee916058929bfac5b237d14aa49e132925587b5a73d4"} Dec 02 12:04:00 crc kubenswrapper[4721]: I1202 12:04:00.694591 4721 generic.go:334] "Generic (PLEG): container finished" podID="664a2f9f-e6d5-40c5-a599-9905670fc63e" containerID="649dfefe41354696ee96643225ab6fddd2817f807694fb8a001555651a489a51" exitCode=0 Dec 02 12:04:00 crc kubenswrapper[4721]: I1202 12:04:00.694667 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ktss9" event={"ID":"664a2f9f-e6d5-40c5-a599-9905670fc63e","Type":"ContainerDied","Data":"649dfefe41354696ee96643225ab6fddd2817f807694fb8a001555651a489a51"} Dec 02 12:04:00 crc kubenswrapper[4721]: I1202 12:04:00.876040 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.009733 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tw9kv\" (UniqueName: \"kubernetes.io/projected/664a2f9f-e6d5-40c5-a599-9905670fc63e-kube-api-access-tw9kv\") pod \"664a2f9f-e6d5-40c5-a599-9905670fc63e\" (UID: \"664a2f9f-e6d5-40c5-a599-9905670fc63e\") " Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.009817 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/664a2f9f-e6d5-40c5-a599-9905670fc63e-utilities\") pod \"664a2f9f-e6d5-40c5-a599-9905670fc63e\" (UID: \"664a2f9f-e6d5-40c5-a599-9905670fc63e\") " Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.009904 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/664a2f9f-e6d5-40c5-a599-9905670fc63e-catalog-content\") pod \"664a2f9f-e6d5-40c5-a599-9905670fc63e\" (UID: \"664a2f9f-e6d5-40c5-a599-9905670fc63e\") " Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.010889 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/664a2f9f-e6d5-40c5-a599-9905670fc63e-utilities" (OuterVolumeSpecName: "utilities") pod "664a2f9f-e6d5-40c5-a599-9905670fc63e" (UID: "664a2f9f-e6d5-40c5-a599-9905670fc63e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.028053 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/664a2f9f-e6d5-40c5-a599-9905670fc63e-kube-api-access-tw9kv" (OuterVolumeSpecName: "kube-api-access-tw9kv") pod "664a2f9f-e6d5-40c5-a599-9905670fc63e" (UID: "664a2f9f-e6d5-40c5-a599-9905670fc63e"). InnerVolumeSpecName "kube-api-access-tw9kv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.110413 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/664a2f9f-e6d5-40c5-a599-9905670fc63e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "664a2f9f-e6d5-40c5-a599-9905670fc63e" (UID: "664a2f9f-e6d5-40c5-a599-9905670fc63e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.110566 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/664a2f9f-e6d5-40c5-a599-9905670fc63e-catalog-content\") pod \"664a2f9f-e6d5-40c5-a599-9905670fc63e\" (UID: \"664a2f9f-e6d5-40c5-a599-9905670fc63e\") " Dec 02 12:04:01 crc kubenswrapper[4721]: W1202 12:04:01.110669 4721 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/664a2f9f-e6d5-40c5-a599-9905670fc63e/volumes/kubernetes.io~empty-dir/catalog-content Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.110685 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/664a2f9f-e6d5-40c5-a599-9905670fc63e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "664a2f9f-e6d5-40c5-a599-9905670fc63e" (UID: "664a2f9f-e6d5-40c5-a599-9905670fc63e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.110802 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/664a2f9f-e6d5-40c5-a599-9905670fc63e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.110822 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tw9kv\" (UniqueName: \"kubernetes.io/projected/664a2f9f-e6d5-40c5-a599-9905670fc63e-kube-api-access-tw9kv\") on node \"crc\" DevicePath \"\"" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.110834 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/664a2f9f-e6d5-40c5-a599-9905670fc63e-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.358693 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-h5trv"] Dec 02 12:04:01 crc kubenswrapper[4721]: E1202 12:04:01.358983 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="664a2f9f-e6d5-40c5-a599-9905670fc63e" containerName="extract-utilities" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.359002 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="664a2f9f-e6d5-40c5-a599-9905670fc63e" containerName="extract-utilities" Dec 02 12:04:01 crc kubenswrapper[4721]: E1202 12:04:01.359015 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="664a2f9f-e6d5-40c5-a599-9905670fc63e" containerName="extract-content" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.359023 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="664a2f9f-e6d5-40c5-a599-9905670fc63e" containerName="extract-content" Dec 02 12:04:01 crc kubenswrapper[4721]: E1202 12:04:01.359034 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="664a2f9f-e6d5-40c5-a599-9905670fc63e" containerName="registry-server" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.359041 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="664a2f9f-e6d5-40c5-a599-9905670fc63e" containerName="registry-server" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.359180 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="664a2f9f-e6d5-40c5-a599-9905670fc63e" containerName="registry-server" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.359956 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.377073 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5trv"] Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.514091 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg4lt\" (UniqueName: \"kubernetes.io/projected/bc94f95a-a843-4396-b6eb-3dc7c3182db7-kube-api-access-sg4lt\") pod \"redhat-marketplace-h5trv\" (UID: \"bc94f95a-a843-4396-b6eb-3dc7c3182db7\") " pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.514163 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc94f95a-a843-4396-b6eb-3dc7c3182db7-utilities\") pod \"redhat-marketplace-h5trv\" (UID: \"bc94f95a-a843-4396-b6eb-3dc7c3182db7\") " pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.514204 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc94f95a-a843-4396-b6eb-3dc7c3182db7-catalog-content\") pod \"redhat-marketplace-h5trv\" (UID: \"bc94f95a-a843-4396-b6eb-3dc7c3182db7\") " pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.615575 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc94f95a-a843-4396-b6eb-3dc7c3182db7-utilities\") pod \"redhat-marketplace-h5trv\" (UID: \"bc94f95a-a843-4396-b6eb-3dc7c3182db7\") " pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.615633 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc94f95a-a843-4396-b6eb-3dc7c3182db7-catalog-content\") pod \"redhat-marketplace-h5trv\" (UID: \"bc94f95a-a843-4396-b6eb-3dc7c3182db7\") " pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.615697 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg4lt\" (UniqueName: \"kubernetes.io/projected/bc94f95a-a843-4396-b6eb-3dc7c3182db7-kube-api-access-sg4lt\") pod \"redhat-marketplace-h5trv\" (UID: \"bc94f95a-a843-4396-b6eb-3dc7c3182db7\") " pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.616040 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc94f95a-a843-4396-b6eb-3dc7c3182db7-utilities\") pod \"redhat-marketplace-h5trv\" (UID: \"bc94f95a-a843-4396-b6eb-3dc7c3182db7\") " pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.616187 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc94f95a-a843-4396-b6eb-3dc7c3182db7-catalog-content\") pod \"redhat-marketplace-h5trv\" (UID: \"bc94f95a-a843-4396-b6eb-3dc7c3182db7\") " pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.633948 4721 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-sg4lt\" (UniqueName: \"kubernetes.io/projected/bc94f95a-a843-4396-b6eb-3dc7c3182db7-kube-api-access-sg4lt\") pod \"redhat-marketplace-h5trv\" (UID: \"bc94f95a-a843-4396-b6eb-3dc7c3182db7\") " pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.690494 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.701827 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ktss9" event={"ID":"664a2f9f-e6d5-40c5-a599-9905670fc63e","Type":"ContainerDied","Data":"5bf0d67db26816a6f4100b97247cc7bdb62e272ac5a5ecb7946966c8bbf6cd87"} Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.701876 4721 scope.go:117] "RemoveContainer" containerID="649dfefe41354696ee96643225ab6fddd2817f807694fb8a001555651a489a51" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.701985 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ktss9" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.734305 4721 scope.go:117] "RemoveContainer" containerID="5b8932d3dfc78f8718bbf9b7f7ec7abf54b17f6d5517a9a611b28921e9e37212" Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.739566 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ktss9"] Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.743783 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ktss9"] Dec 02 12:04:01 crc kubenswrapper[4721]: I1202 12:04:01.761055 4721 scope.go:117] "RemoveContainer" containerID="c36a3a19779b394642dee8275db7d78e52c420510f3a69ff202ffdc30938910a" Dec 02 12:04:02 crc kubenswrapper[4721]: I1202 12:04:02.140043 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5trv"] Dec 02 12:04:02 crc kubenswrapper[4721]: I1202 12:04:02.845798 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="664a2f9f-e6d5-40c5-a599-9905670fc63e" path="/var/lib/kubelet/pods/664a2f9f-e6d5-40c5-a599-9905670fc63e/volumes" Dec 02 12:04:02 crc kubenswrapper[4721]: W1202 12:04:02.998969 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbc94f95a_a843_4396_b6eb_3dc7c3182db7.slice/crio-dd2c2748bf6dd5bc882fdfde174627498bd0dcc0d2e0b26b9ad25bed9aa08017 WatchSource:0}: Error finding container dd2c2748bf6dd5bc882fdfde174627498bd0dcc0d2e0b26b9ad25bed9aa08017: Status 404 returned error can't find the container with id dd2c2748bf6dd5bc882fdfde174627498bd0dcc0d2e0b26b9ad25bed9aa08017 Dec 02 12:04:03 crc kubenswrapper[4721]: I1202 12:04:03.713867 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5trv" event={"ID":"bc94f95a-a843-4396-b6eb-3dc7c3182db7","Type":"ContainerStarted","Data":"dd2c2748bf6dd5bc882fdfde174627498bd0dcc0d2e0b26b9ad25bed9aa08017"} Dec 02 12:04:05 crc kubenswrapper[4721]: I1202 12:04:05.724525 4721 generic.go:334] "Generic (PLEG): container finished" podID="bc94f95a-a843-4396-b6eb-3dc7c3182db7" containerID="00d561776e485c4b061bef9e83598a5444497a93a016ae36aec95e5568ab87d5" exitCode=0 Dec 02 12:04:05 crc kubenswrapper[4721]: I1202 12:04:05.724711 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-h5trv" event={"ID":"bc94f95a-a843-4396-b6eb-3dc7c3182db7","Type":"ContainerDied","Data":"00d561776e485c4b061bef9e83598a5444497a93a016ae36aec95e5568ab87d5"} Dec 02 12:04:05 crc kubenswrapper[4721]: I1202 12:04:05.726639 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" event={"ID":"1e7d973f-0100-4daa-b4ca-f6f8f9b90a22","Type":"ContainerStarted","Data":"ff2d8b6d99dd084b22340e8cfd13cb798dc98d5372708deae67b2807e67989e9"} Dec 02 12:04:05 crc kubenswrapper[4721]: I1202 12:04:05.726749 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" Dec 02 12:04:05 crc kubenswrapper[4721]: I1202 12:04:05.729291 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" event={"ID":"c7571582-1d03-4d8d-92f0-0ec120163508","Type":"ContainerStarted","Data":"a38325f0149fe0896ba1bf116a50de27625dd2b1086350fb1276e07bfba028f3"} Dec 02 12:04:05 crc kubenswrapper[4721]: I1202 12:04:05.729368 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" Dec 02 12:04:05 crc kubenswrapper[4721]: I1202 12:04:05.766992 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" podStartSLOduration=2.1444158939999998 podStartE2EDuration="7.766965774s" podCreationTimestamp="2025-12-02 12:03:58 +0000 UTC" firstStartedPulling="2025-12-02 12:03:59.251771317 +0000 UTC m=+768.990321666" lastFinishedPulling="2025-12-02 12:04:04.874321197 +0000 UTC m=+774.612871546" observedRunningTime="2025-12-02 12:04:05.761600908 +0000 UTC m=+775.500151257" watchObservedRunningTime="2025-12-02 12:04:05.766965774 +0000 UTC m=+775.505516123" Dec 02 12:04:05 crc kubenswrapper[4721]: I1202 12:04:05.782347 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" podStartSLOduration=2.267677786 podStartE2EDuration="7.782323856s" podCreationTimestamp="2025-12-02 12:03:58 +0000 UTC" firstStartedPulling="2025-12-02 12:03:59.450209433 +0000 UTC m=+769.188759782" lastFinishedPulling="2025-12-02 12:04:04.964855503 +0000 UTC m=+774.703405852" observedRunningTime="2025-12-02 12:04:05.78183282 +0000 UTC m=+775.520383179" watchObservedRunningTime="2025-12-02 12:04:05.782323856 +0000 UTC m=+775.520874205" Dec 02 12:04:06 crc kubenswrapper[4721]: I1202 12:04:06.734529 4721 generic.go:334] "Generic (PLEG): container finished" podID="bc94f95a-a843-4396-b6eb-3dc7c3182db7" containerID="8d6d3fd2927b41374eb00b05c989b4265ae895c0a41fb59946c3a9f718859c8d" exitCode=0 Dec 02 12:04:06 crc kubenswrapper[4721]: I1202 12:04:06.734614 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5trv" event={"ID":"bc94f95a-a843-4396-b6eb-3dc7c3182db7","Type":"ContainerDied","Data":"8d6d3fd2927b41374eb00b05c989b4265ae895c0a41fb59946c3a9f718859c8d"} Dec 02 12:04:07 crc kubenswrapper[4721]: I1202 12:04:07.740656 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5trv" event={"ID":"bc94f95a-a843-4396-b6eb-3dc7c3182db7","Type":"ContainerStarted","Data":"d73403dc0c122ec4a008c55c55faacd068ba9993c055fee895024206d36d0f3d"} Dec 02 12:04:07 crc kubenswrapper[4721]: I1202 
12:04:07.759119 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-h5trv" podStartSLOduration=5.104740726 podStartE2EDuration="6.759102979s" podCreationTimestamp="2025-12-02 12:04:01 +0000 UTC" firstStartedPulling="2025-12-02 12:04:05.72624801 +0000 UTC m=+775.464798359" lastFinishedPulling="2025-12-02 12:04:07.380610263 +0000 UTC m=+777.119160612" observedRunningTime="2025-12-02 12:04:07.754447046 +0000 UTC m=+777.492997415" watchObservedRunningTime="2025-12-02 12:04:07.759102979 +0000 UTC m=+777.497653328" Dec 02 12:04:11 crc kubenswrapper[4721]: I1202 12:04:11.094155 4721 scope.go:117] "RemoveContainer" containerID="4f55051ff5eedef4b085c57ab10b6545bc8d2abdc70c6e15444e4e0adcd5f659" Dec 02 12:04:11 crc kubenswrapper[4721]: I1202 12:04:11.691510 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:11 crc kubenswrapper[4721]: I1202 12:04:11.693502 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:11 crc kubenswrapper[4721]: I1202 12:04:11.737139 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:12 crc kubenswrapper[4721]: I1202 12:04:12.766050 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jzz4m_3becf918-fa56-4f68-9963-b19fb91d4343/kube-multus/2.log" Dec 02 12:04:19 crc kubenswrapper[4721]: I1202 12:04:19.196558 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5c48b86c94-n454v" Dec 02 12:04:21 crc kubenswrapper[4721]: I1202 12:04:21.729881 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:22 crc kubenswrapper[4721]: I1202 12:04:22.426581 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5trv"] Dec 02 12:04:22 crc kubenswrapper[4721]: I1202 12:04:22.427045 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-h5trv" podUID="bc94f95a-a843-4396-b6eb-3dc7c3182db7" containerName="registry-server" containerID="cri-o://d73403dc0c122ec4a008c55c55faacd068ba9993c055fee895024206d36d0f3d" gracePeriod=2 Dec 02 12:04:22 crc kubenswrapper[4721]: I1202 12:04:22.823886 4721 generic.go:334] "Generic (PLEG): container finished" podID="bc94f95a-a843-4396-b6eb-3dc7c3182db7" containerID="d73403dc0c122ec4a008c55c55faacd068ba9993c055fee895024206d36d0f3d" exitCode=0 Dec 02 12:04:22 crc kubenswrapper[4721]: I1202 12:04:22.823932 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5trv" event={"ID":"bc94f95a-a843-4396-b6eb-3dc7c3182db7","Type":"ContainerDied","Data":"d73403dc0c122ec4a008c55c55faacd068ba9993c055fee895024206d36d0f3d"}
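The DELETE above becomes a graceful stop: kuberuntime_container.go kills registry-server with gracePeriod=2, and the PLEG then reports ContainerDied with exitCode=0. A sketch of that escalation, assuming stand-in signal/kill/exited hooks rather than the real CRI calls (CRI-O performs the actual signalling):

```go
// A sketch of "Killing container with a grace period": SIGTERM first,
// SIGKILL only if the container outlives the grace period. The hooks
// here are hypothetical stand-ins for CRI calls.
package main

import (
	"fmt"
	"time"
)

// stop asks the runtime to stop a container, escalating to a hard kill
// once the grace period expires.
func stop(id string, grace time.Duration, signal, kill func(string), exited func(string) bool) {
	signal(id) // SIGTERM first
	deadline := time.Now().Add(grace)
	for time.Now().Before(deadline) {
		if exited(id) {
			fmt.Printf("container %s exited within the grace period\n", id)
			return
		}
		time.Sleep(50 * time.Millisecond)
	}
	kill(id) // SIGKILL once the grace period is spent
}

func main() {
	gone := false
	stop("d73403dc0c12", 2*time.Second, // gracePeriod=2, as in the log
		func(string) { gone = true }, // pretend SIGTERM makes it exit cleanly (exitCode=0)
		func(string) { fmt.Println("escalated to SIGKILL") },
		func(id string) bool { return gone })
}
```

Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.323919 4721 util.go:48] "No ready sandbox for pod can be found.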
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.406285 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc94f95a-a843-4396-b6eb-3dc7c3182db7-catalog-content\") pod \"bc94f95a-a843-4396-b6eb-3dc7c3182db7\" (UID: \"bc94f95a-a843-4396-b6eb-3dc7c3182db7\") " Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.406429 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc94f95a-a843-4396-b6eb-3dc7c3182db7-utilities\") pod \"bc94f95a-a843-4396-b6eb-3dc7c3182db7\" (UID: \"bc94f95a-a843-4396-b6eb-3dc7c3182db7\") " Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.406470 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sg4lt\" (UniqueName: \"kubernetes.io/projected/bc94f95a-a843-4396-b6eb-3dc7c3182db7-kube-api-access-sg4lt\") pod \"bc94f95a-a843-4396-b6eb-3dc7c3182db7\" (UID: \"bc94f95a-a843-4396-b6eb-3dc7c3182db7\") " Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.407128 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc94f95a-a843-4396-b6eb-3dc7c3182db7-utilities" (OuterVolumeSpecName: "utilities") pod "bc94f95a-a843-4396-b6eb-3dc7c3182db7" (UID: "bc94f95a-a843-4396-b6eb-3dc7c3182db7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.411867 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc94f95a-a843-4396-b6eb-3dc7c3182db7-kube-api-access-sg4lt" (OuterVolumeSpecName: "kube-api-access-sg4lt") pod "bc94f95a-a843-4396-b6eb-3dc7c3182db7" (UID: "bc94f95a-a843-4396-b6eb-3dc7c3182db7"). InnerVolumeSpecName "kube-api-access-sg4lt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.423530 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc94f95a-a843-4396-b6eb-3dc7c3182db7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bc94f95a-a843-4396-b6eb-3dc7c3182db7" (UID: "bc94f95a-a843-4396-b6eb-3dc7c3182db7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.507997 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc94f95a-a843-4396-b6eb-3dc7c3182db7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.508044 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc94f95a-a843-4396-b6eb-3dc7c3182db7-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.508063 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sg4lt\" (UniqueName: \"kubernetes.io/projected/bc94f95a-a843-4396-b6eb-3dc7c3182db7-kube-api-access-sg4lt\") on node \"crc\" DevicePath \"\"" Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.840787 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h5trv" event={"ID":"bc94f95a-a843-4396-b6eb-3dc7c3182db7","Type":"ContainerDied","Data":"dd2c2748bf6dd5bc882fdfde174627498bd0dcc0d2e0b26b9ad25bed9aa08017"} Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.840843 4721 scope.go:117] "RemoveContainer" containerID="d73403dc0c122ec4a008c55c55faacd068ba9993c055fee895024206d36d0f3d" Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.840967 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h5trv" Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.858790 4721 scope.go:117] "RemoveContainer" containerID="8d6d3fd2927b41374eb00b05c989b4265ae895c0a41fb59946c3a9f718859c8d" Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.892717 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5trv"] Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.897477 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-h5trv"] Dec 02 12:04:23 crc kubenswrapper[4721]: I1202 12:04:23.901955 4721 scope.go:117] "RemoveContainer" containerID="00d561776e485c4b061bef9e83598a5444497a93a016ae36aec95e5568ab87d5" Dec 02 12:04:24 crc kubenswrapper[4721]: I1202 12:04:24.847876 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc94f95a-a843-4396-b6eb-3dc7c3182db7" path="/var/lib/kubelet/pods/bc94f95a-a843-4396-b6eb-3dc7c3182db7/volumes" Dec 02 12:04:25 crc kubenswrapper[4721]: I1202 12:04:25.703471 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 12:04:25 crc kubenswrapper[4721]: I1202 12:04:25.703828 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 12:04:38 crc kubenswrapper[4721]: I1202 12:04:38.913911 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-86c7ccf9f9-b95bs" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.725008 4721 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj"] Dec 02 12:04:39 crc kubenswrapper[4721]: E1202 12:04:39.725708 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc94f95a-a843-4396-b6eb-3dc7c3182db7" containerName="extract-content" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.725742 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc94f95a-a843-4396-b6eb-3dc7c3182db7" containerName="extract-content" Dec 02 12:04:39 crc kubenswrapper[4721]: E1202 12:04:39.725765 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc94f95a-a843-4396-b6eb-3dc7c3182db7" containerName="extract-utilities" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.725778 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc94f95a-a843-4396-b6eb-3dc7c3182db7" containerName="extract-utilities" Dec 02 12:04:39 crc kubenswrapper[4721]: E1202 12:04:39.725808 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc94f95a-a843-4396-b6eb-3dc7c3182db7" containerName="registry-server" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.725822 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc94f95a-a843-4396-b6eb-3dc7c3182db7" containerName="registry-server" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.725993 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc94f95a-a843-4396-b6eb-3dc7c3182db7" containerName="registry-server" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.726638 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.728557 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-69bnp"] Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.728667 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-2z85w" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.729314 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.731000 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.734004 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.738023 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.742574 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj"] Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.813691 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-m4dmx"] Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.814482 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-m4dmx" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.816485 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.816579 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-2vmzv" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.816750 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.816962 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.839931 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9e956286-b38a-40dc-b65c-ab7395592012-frr-startup\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.840010 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9e956286-b38a-40dc-b65c-ab7395592012-frr-conf\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.840314 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e956286-b38a-40dc-b65c-ab7395592012-metrics-certs\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.840397 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9e956286-b38a-40dc-b65c-ab7395592012-reloader\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.840451 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wn7lr\" (UniqueName: \"kubernetes.io/projected/9e956286-b38a-40dc-b65c-ab7395592012-kube-api-access-wn7lr\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.840472 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9e956286-b38a-40dc-b65c-ab7395592012-metrics\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.840603 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/33b0035f-5cb5-458f-a5f9-498a488c358f-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-qmwqj\" (UID: \"33b0035f-5cb5-458f-a5f9-498a488c358f\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.840659 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9e956286-b38a-40dc-b65c-ab7395592012-frr-sockets\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.840767 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jcsq\" (UniqueName: \"kubernetes.io/projected/33b0035f-5cb5-458f-a5f9-498a488c358f-kube-api-access-7jcsq\") pod \"frr-k8s-webhook-server-7fcb986d4-qmwqj\" (UID: \"33b0035f-5cb5-458f-a5f9-498a488c358f\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.851654 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-clfzp"] Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.852471 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-clfzp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.854469 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.867199 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-clfzp"] Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.941409 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9e956286-b38a-40dc-b65c-ab7395592012-frr-sockets\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.941461 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b91afd1f-b9c6-49e2-8c26-93bdf658600e-metallb-excludel2\") pod \"speaker-m4dmx\" (UID: \"b91afd1f-b9c6-49e2-8c26-93bdf658600e\") " pod="metallb-system/speaker-m4dmx" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.941498 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jcsq\" (UniqueName: \"kubernetes.io/projected/33b0035f-5cb5-458f-a5f9-498a488c358f-kube-api-access-7jcsq\") pod \"frr-k8s-webhook-server-7fcb986d4-qmwqj\" (UID: \"33b0035f-5cb5-458f-a5f9-498a488c358f\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.941527 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrwfv\" (UniqueName: \"kubernetes.io/projected/b91afd1f-b9c6-49e2-8c26-93bdf658600e-kube-api-access-zrwfv\") pod \"speaker-m4dmx\" (UID: \"b91afd1f-b9c6-49e2-8c26-93bdf658600e\") " pod="metallb-system/speaker-m4dmx" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.941569 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9e956286-b38a-40dc-b65c-ab7395592012-frr-startup\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.941642 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: 
\"kubernetes.io/empty-dir/9e956286-b38a-40dc-b65c-ab7395592012-frr-conf\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.941707 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e956286-b38a-40dc-b65c-ab7395592012-metrics-certs\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.941729 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9e956286-b38a-40dc-b65c-ab7395592012-reloader\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.941747 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b91afd1f-b9c6-49e2-8c26-93bdf658600e-memberlist\") pod \"speaker-m4dmx\" (UID: \"b91afd1f-b9c6-49e2-8c26-93bdf658600e\") " pod="metallb-system/speaker-m4dmx" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.941779 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wn7lr\" (UniqueName: \"kubernetes.io/projected/9e956286-b38a-40dc-b65c-ab7395592012-kube-api-access-wn7lr\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.941795 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9e956286-b38a-40dc-b65c-ab7395592012-metrics\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.941811 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b91afd1f-b9c6-49e2-8c26-93bdf658600e-metrics-certs\") pod \"speaker-m4dmx\" (UID: \"b91afd1f-b9c6-49e2-8c26-93bdf658600e\") " pod="metallb-system/speaker-m4dmx" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.941848 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/33b0035f-5cb5-458f-a5f9-498a488c358f-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-qmwqj\" (UID: \"33b0035f-5cb5-458f-a5f9-498a488c358f\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.941935 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9e956286-b38a-40dc-b65c-ab7395592012-frr-sockets\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: E1202 12:04:39.942253 4721 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 02 12:04:39 crc kubenswrapper[4721]: E1202 12:04:39.942315 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9e956286-b38a-40dc-b65c-ab7395592012-metrics-certs podName:9e956286-b38a-40dc-b65c-ab7395592012 nodeName:}" 
failed. No retries permitted until 2025-12-02 12:04:40.442294525 +0000 UTC m=+810.180844974 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9e956286-b38a-40dc-b65c-ab7395592012-metrics-certs") pod "frr-k8s-69bnp" (UID: "9e956286-b38a-40dc-b65c-ab7395592012") : secret "frr-k8s-certs-secret" not found Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.942438 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9e956286-b38a-40dc-b65c-ab7395592012-reloader\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.942565 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9e956286-b38a-40dc-b65c-ab7395592012-metrics\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.942599 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9e956286-b38a-40dc-b65c-ab7395592012-frr-startup\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.942667 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9e956286-b38a-40dc-b65c-ab7395592012-frr-conf\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.947004 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/33b0035f-5cb5-458f-a5f9-498a488c358f-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-qmwqj\" (UID: \"33b0035f-5cb5-458f-a5f9-498a488c358f\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.958653 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wn7lr\" (UniqueName: \"kubernetes.io/projected/9e956286-b38a-40dc-b65c-ab7395592012-kube-api-access-wn7lr\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:39 crc kubenswrapper[4721]: I1202 12:04:39.959088 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jcsq\" (UniqueName: \"kubernetes.io/projected/33b0035f-5cb5-458f-a5f9-498a488c358f-kube-api-access-7jcsq\") pod \"frr-k8s-webhook-server-7fcb986d4-qmwqj\" (UID: \"33b0035f-5cb5-458f-a5f9-498a488c358f\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.042720 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b91afd1f-b9c6-49e2-8c26-93bdf658600e-memberlist\") pod \"speaker-m4dmx\" (UID: \"b91afd1f-b9c6-49e2-8c26-93bdf658600e\") " pod="metallb-system/speaker-m4dmx" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.042782 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b91afd1f-b9c6-49e2-8c26-93bdf658600e-metrics-certs\") pod 
\"speaker-m4dmx\" (UID: \"b91afd1f-b9c6-49e2-8c26-93bdf658600e\") " pod="metallb-system/speaker-m4dmx" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.042816 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/15a0fceb-bf2e-4a05-870e-7501c772ae6c-metrics-certs\") pod \"controller-f8648f98b-clfzp\" (UID: \"15a0fceb-bf2e-4a05-870e-7501c772ae6c\") " pod="metallb-system/controller-f8648f98b-clfzp" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.042856 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/15a0fceb-bf2e-4a05-870e-7501c772ae6c-cert\") pod \"controller-f8648f98b-clfzp\" (UID: \"15a0fceb-bf2e-4a05-870e-7501c772ae6c\") " pod="metallb-system/controller-f8648f98b-clfzp" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.042883 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b91afd1f-b9c6-49e2-8c26-93bdf658600e-metallb-excludel2\") pod \"speaker-m4dmx\" (UID: \"b91afd1f-b9c6-49e2-8c26-93bdf658600e\") " pod="metallb-system/speaker-m4dmx" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.042918 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24chf\" (UniqueName: \"kubernetes.io/projected/15a0fceb-bf2e-4a05-870e-7501c772ae6c-kube-api-access-24chf\") pod \"controller-f8648f98b-clfzp\" (UID: \"15a0fceb-bf2e-4a05-870e-7501c772ae6c\") " pod="metallb-system/controller-f8648f98b-clfzp" Dec 02 12:04:40 crc kubenswrapper[4721]: E1202 12:04:40.042955 4721 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 02 12:04:40 crc kubenswrapper[4721]: E1202 12:04:40.043051 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b91afd1f-b9c6-49e2-8c26-93bdf658600e-memberlist podName:b91afd1f-b9c6-49e2-8c26-93bdf658600e nodeName:}" failed. No retries permitted until 2025-12-02 12:04:40.543030085 +0000 UTC m=+810.281580434 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b91afd1f-b9c6-49e2-8c26-93bdf658600e-memberlist") pod "speaker-m4dmx" (UID: "b91afd1f-b9c6-49e2-8c26-93bdf658600e") : secret "metallb-memberlist" not found Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.043132 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrwfv\" (UniqueName: \"kubernetes.io/projected/b91afd1f-b9c6-49e2-8c26-93bdf658600e-kube-api-access-zrwfv\") pod \"speaker-m4dmx\" (UID: \"b91afd1f-b9c6-49e2-8c26-93bdf658600e\") " pod="metallb-system/speaker-m4dmx" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.043731 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b91afd1f-b9c6-49e2-8c26-93bdf658600e-metallb-excludel2\") pod \"speaker-m4dmx\" (UID: \"b91afd1f-b9c6-49e2-8c26-93bdf658600e\") " pod="metallb-system/speaker-m4dmx" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.048828 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b91afd1f-b9c6-49e2-8c26-93bdf658600e-metrics-certs\") pod \"speaker-m4dmx\" (UID: \"b91afd1f-b9c6-49e2-8c26-93bdf658600e\") " pod="metallb-system/speaker-m4dmx" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.051785 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.062581 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrwfv\" (UniqueName: \"kubernetes.io/projected/b91afd1f-b9c6-49e2-8c26-93bdf658600e-kube-api-access-zrwfv\") pod \"speaker-m4dmx\" (UID: \"b91afd1f-b9c6-49e2-8c26-93bdf658600e\") " pod="metallb-system/speaker-m4dmx" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.149209 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/15a0fceb-bf2e-4a05-870e-7501c772ae6c-metrics-certs\") pod \"controller-f8648f98b-clfzp\" (UID: \"15a0fceb-bf2e-4a05-870e-7501c772ae6c\") " pod="metallb-system/controller-f8648f98b-clfzp" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.149279 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/15a0fceb-bf2e-4a05-870e-7501c772ae6c-cert\") pod \"controller-f8648f98b-clfzp\" (UID: \"15a0fceb-bf2e-4a05-870e-7501c772ae6c\") " pod="metallb-system/controller-f8648f98b-clfzp" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.149326 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24chf\" (UniqueName: \"kubernetes.io/projected/15a0fceb-bf2e-4a05-870e-7501c772ae6c-kube-api-access-24chf\") pod \"controller-f8648f98b-clfzp\" (UID: \"15a0fceb-bf2e-4a05-870e-7501c772ae6c\") " pod="metallb-system/controller-f8648f98b-clfzp" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.151481 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.154992 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/15a0fceb-bf2e-4a05-870e-7501c772ae6c-metrics-certs\") pod \"controller-f8648f98b-clfzp\" (UID: 
\"15a0fceb-bf2e-4a05-870e-7501c772ae6c\") " pod="metallb-system/controller-f8648f98b-clfzp" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.163621 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/15a0fceb-bf2e-4a05-870e-7501c772ae6c-cert\") pod \"controller-f8648f98b-clfzp\" (UID: \"15a0fceb-bf2e-4a05-870e-7501c772ae6c\") " pod="metallb-system/controller-f8648f98b-clfzp" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.171860 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24chf\" (UniqueName: \"kubernetes.io/projected/15a0fceb-bf2e-4a05-870e-7501c772ae6c-kube-api-access-24chf\") pod \"controller-f8648f98b-clfzp\" (UID: \"15a0fceb-bf2e-4a05-870e-7501c772ae6c\") " pod="metallb-system/controller-f8648f98b-clfzp" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.456121 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e956286-b38a-40dc-b65c-ab7395592012-metrics-certs\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.456686 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj"] Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.459302 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9e956286-b38a-40dc-b65c-ab7395592012-metrics-certs\") pod \"frr-k8s-69bnp\" (UID: \"9e956286-b38a-40dc-b65c-ab7395592012\") " pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:40 crc kubenswrapper[4721]: W1202 12:04:40.461503 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33b0035f_5cb5_458f_a5f9_498a488c358f.slice/crio-af1b858638daaad886e18cc453e30b521257ef9ed1118795967acc2c2524199e WatchSource:0}: Error finding container af1b858638daaad886e18cc453e30b521257ef9ed1118795967acc2c2524199e: Status 404 returned error can't find the container with id af1b858638daaad886e18cc453e30b521257ef9ed1118795967acc2c2524199e Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.464596 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-clfzp" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.558300 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b91afd1f-b9c6-49e2-8c26-93bdf658600e-memberlist\") pod \"speaker-m4dmx\" (UID: \"b91afd1f-b9c6-49e2-8c26-93bdf658600e\") " pod="metallb-system/speaker-m4dmx" Dec 02 12:04:40 crc kubenswrapper[4721]: E1202 12:04:40.558511 4721 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 02 12:04:40 crc kubenswrapper[4721]: E1202 12:04:40.558577 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b91afd1f-b9c6-49e2-8c26-93bdf658600e-memberlist podName:b91afd1f-b9c6-49e2-8c26-93bdf658600e nodeName:}" failed. No retries permitted until 2025-12-02 12:04:41.558558517 +0000 UTC m=+811.297108866 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b91afd1f-b9c6-49e2-8c26-93bdf658600e-memberlist") pod "speaker-m4dmx" (UID: "b91afd1f-b9c6-49e2-8c26-93bdf658600e") : secret "metallb-memberlist" not found Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.666893 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.699159 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-clfzp"] Dec 02 12:04:40 crc kubenswrapper[4721]: W1202 12:04:40.704021 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15a0fceb_bf2e_4a05_870e_7501c772ae6c.slice/crio-ef795bc19ab62e529c4e95e0ef6026a51771dd2c775606cdb7fe3db34a3b161c WatchSource:0}: Error finding container ef795bc19ab62e529c4e95e0ef6026a51771dd2c775606cdb7fe3db34a3b161c: Status 404 returned error can't find the container with id ef795bc19ab62e529c4e95e0ef6026a51771dd2c775606cdb7fe3db34a3b161c Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.937496 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-clfzp" event={"ID":"15a0fceb-bf2e-4a05-870e-7501c772ae6c","Type":"ContainerStarted","Data":"70930f7f06e43d5c0b647a80053aab4dbbcfab0a7a446fb546ccd7aa07f4316f"} Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.937557 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-clfzp" event={"ID":"15a0fceb-bf2e-4a05-870e-7501c772ae6c","Type":"ContainerStarted","Data":"ef795bc19ab62e529c4e95e0ef6026a51771dd2c775606cdb7fe3db34a3b161c"} Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.938460 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-69bnp" event={"ID":"9e956286-b38a-40dc-b65c-ab7395592012","Type":"ContainerStarted","Data":"fe110993c89c0978ec9602fd9623724dca9645ce5580e7010f8014e5fd7575ed"} Dec 02 12:04:40 crc kubenswrapper[4721]: I1202 12:04:40.939279 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj" event={"ID":"33b0035f-5cb5-458f-a5f9-498a488c358f","Type":"ContainerStarted","Data":"af1b858638daaad886e18cc453e30b521257ef9ed1118795967acc2c2524199e"} Dec 02 12:04:41 crc kubenswrapper[4721]: I1202 12:04:41.571114 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b91afd1f-b9c6-49e2-8c26-93bdf658600e-memberlist\") pod \"speaker-m4dmx\" (UID: \"b91afd1f-b9c6-49e2-8c26-93bdf658600e\") " pod="metallb-system/speaker-m4dmx" Dec 02 12:04:41 crc kubenswrapper[4721]: I1202 12:04:41.576236 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b91afd1f-b9c6-49e2-8c26-93bdf658600e-memberlist\") pod \"speaker-m4dmx\" (UID: \"b91afd1f-b9c6-49e2-8c26-93bdf658600e\") " pod="metallb-system/speaker-m4dmx" Dec 02 12:04:41 crc kubenswrapper[4721]: I1202 12:04:41.629678 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-m4dmx" Dec 02 12:04:41 crc kubenswrapper[4721]: W1202 12:04:41.648780 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb91afd1f_b9c6_49e2_8c26_93bdf658600e.slice/crio-c0e8cbb0f3e30c256749ee36c75f85952355b90f5d51dcae52444f5f3b908261 WatchSource:0}: Error finding container c0e8cbb0f3e30c256749ee36c75f85952355b90f5d51dcae52444f5f3b908261: Status 404 returned error can't find the container with id c0e8cbb0f3e30c256749ee36c75f85952355b90f5d51dcae52444f5f3b908261 Dec 02 12:04:41 crc kubenswrapper[4721]: I1202 12:04:41.945482 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-m4dmx" event={"ID":"b91afd1f-b9c6-49e2-8c26-93bdf658600e","Type":"ContainerStarted","Data":"774cdceba0fa4ed8b7a8cb4bfe93ec8edae3b71a18a97f2f2809832530d05e83"} Dec 02 12:04:41 crc kubenswrapper[4721]: I1202 12:04:41.945523 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-m4dmx" event={"ID":"b91afd1f-b9c6-49e2-8c26-93bdf658600e","Type":"ContainerStarted","Data":"c0e8cbb0f3e30c256749ee36c75f85952355b90f5d51dcae52444f5f3b908261"} Dec 02 12:04:44 crc kubenswrapper[4721]: I1202 12:04:44.966824 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-clfzp" event={"ID":"15a0fceb-bf2e-4a05-870e-7501c772ae6c","Type":"ContainerStarted","Data":"202e18f879773224f0c70264dadb71c3380fd7bdab63800b890759383dbee686"} Dec 02 12:04:44 crc kubenswrapper[4721]: I1202 12:04:44.967198 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-clfzp" Dec 02 12:04:44 crc kubenswrapper[4721]: I1202 12:04:44.984388 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-clfzp" podStartSLOduration=2.007364697 podStartE2EDuration="5.984375145s" podCreationTimestamp="2025-12-02 12:04:39 +0000 UTC" firstStartedPulling="2025-12-02 12:04:40.79946459 +0000 UTC m=+810.538014929" lastFinishedPulling="2025-12-02 12:04:44.776475028 +0000 UTC m=+814.515025377" observedRunningTime="2025-12-02 12:04:44.983478487 +0000 UTC m=+814.722028856" watchObservedRunningTime="2025-12-02 12:04:44.984375145 +0000 UTC m=+814.722925494" Dec 02 12:04:45 crc kubenswrapper[4721]: I1202 12:04:45.990878 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-m4dmx" event={"ID":"b91afd1f-b9c6-49e2-8c26-93bdf658600e","Type":"ContainerStarted","Data":"37e7900b69fbb43b7b0a3ea96d9a4cf26091b502cdd77a2316a4572659674056"} Dec 02 12:04:46 crc kubenswrapper[4721]: I1202 12:04:46.010594 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-m4dmx" podStartSLOduration=4.105197735 podStartE2EDuration="7.010579171s" podCreationTimestamp="2025-12-02 12:04:39 +0000 UTC" firstStartedPulling="2025-12-02 12:04:41.888535379 +0000 UTC m=+811.627085728" lastFinishedPulling="2025-12-02 12:04:44.793916815 +0000 UTC m=+814.532467164" observedRunningTime="2025-12-02 12:04:46.007594689 +0000 UTC m=+815.746145048" watchObservedRunningTime="2025-12-02 12:04:46.010579171 +0000 UTC m=+815.749129520" Dec 02 12:04:46 crc kubenswrapper[4721]: I1202 12:04:46.995175 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-m4dmx" Dec 02 12:04:47 crc kubenswrapper[4721]: I1202 12:04:47.999642 4721 generic.go:334] "Generic (PLEG): container finished" 
podID="9e956286-b38a-40dc-b65c-ab7395592012" containerID="1c6e79c6ba42faec6c41ae5664d3fa24589288e57f4335166a56508bd1853878" exitCode=0 Dec 02 12:04:48 crc kubenswrapper[4721]: I1202 12:04:47.999700 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-69bnp" event={"ID":"9e956286-b38a-40dc-b65c-ab7395592012","Type":"ContainerDied","Data":"1c6e79c6ba42faec6c41ae5664d3fa24589288e57f4335166a56508bd1853878"} Dec 02 12:04:48 crc kubenswrapper[4721]: I1202 12:04:48.001446 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj" event={"ID":"33b0035f-5cb5-458f-a5f9-498a488c358f","Type":"ContainerStarted","Data":"8a84654306becb02fdf59842d8bebddda220324e0c6b83370c5bb2770202e73d"} Dec 02 12:04:48 crc kubenswrapper[4721]: I1202 12:04:48.041705 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj" podStartSLOduration=2.281952287 podStartE2EDuration="9.041684946s" podCreationTimestamp="2025-12-02 12:04:39 +0000 UTC" firstStartedPulling="2025-12-02 12:04:40.467552878 +0000 UTC m=+810.206103227" lastFinishedPulling="2025-12-02 12:04:47.227285537 +0000 UTC m=+816.965835886" observedRunningTime="2025-12-02 12:04:48.03890978 +0000 UTC m=+817.777460129" watchObservedRunningTime="2025-12-02 12:04:48.041684946 +0000 UTC m=+817.780235305" Dec 02 12:04:49 crc kubenswrapper[4721]: I1202 12:04:49.008883 4721 generic.go:334] "Generic (PLEG): container finished" podID="9e956286-b38a-40dc-b65c-ab7395592012" containerID="b7ab1f83c796b1c7d60b7fa4ce73f4154950d0db1d9454b1a1ac07d5c3a9cf25" exitCode=0 Dec 02 12:04:49 crc kubenswrapper[4721]: I1202 12:04:49.008925 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-69bnp" event={"ID":"9e956286-b38a-40dc-b65c-ab7395592012","Type":"ContainerDied","Data":"b7ab1f83c796b1c7d60b7fa4ce73f4154950d0db1d9454b1a1ac07d5c3a9cf25"} Dec 02 12:04:49 crc kubenswrapper[4721]: I1202 12:04:49.009935 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj" Dec 02 12:04:50 crc kubenswrapper[4721]: I1202 12:04:50.018334 4721 generic.go:334] "Generic (PLEG): container finished" podID="9e956286-b38a-40dc-b65c-ab7395592012" containerID="dff82703b52469766ccbe380e06c4be47ff2c5b2a83879331e2a53f05d43dec7" exitCode=0 Dec 02 12:04:50 crc kubenswrapper[4721]: I1202 12:04:50.018457 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-69bnp" event={"ID":"9e956286-b38a-40dc-b65c-ab7395592012","Type":"ContainerDied","Data":"dff82703b52469766ccbe380e06c4be47ff2c5b2a83879331e2a53f05d43dec7"} Dec 02 12:04:50 crc kubenswrapper[4721]: I1202 12:04:50.469034 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-clfzp" Dec 02 12:04:51 crc kubenswrapper[4721]: I1202 12:04:51.026396 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-69bnp" event={"ID":"9e956286-b38a-40dc-b65c-ab7395592012","Type":"ContainerStarted","Data":"0ffa80cf9f48beec639b728bdcc9b2276e1e4d2e0652648e1f96adf7a7d8c5e4"} Dec 02 12:04:51 crc kubenswrapper[4721]: I1202 12:04:51.026432 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-69bnp" event={"ID":"9e956286-b38a-40dc-b65c-ab7395592012","Type":"ContainerStarted","Data":"d047f732dccf3094ea13e601993cd3718ddda466d242739a107fabb09e5ab84b"} Dec 02 12:04:51 crc kubenswrapper[4721]: I1202 
12:04:51.026442 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-69bnp" event={"ID":"9e956286-b38a-40dc-b65c-ab7395592012","Type":"ContainerStarted","Data":"b38e148ac70914455652a35d8f715b5d979d05d7646afae3026bc3cf58180a60"} Dec 02 12:04:51 crc kubenswrapper[4721]: I1202 12:04:51.026452 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-69bnp" event={"ID":"9e956286-b38a-40dc-b65c-ab7395592012","Type":"ContainerStarted","Data":"3fa8b65364326aaff644356b6d518dea2b6ba8c042ece72cd45ade6421160abe"} Dec 02 12:04:51 crc kubenswrapper[4721]: I1202 12:04:51.026461 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-69bnp" event={"ID":"9e956286-b38a-40dc-b65c-ab7395592012","Type":"ContainerStarted","Data":"72b69fb54373598fe7776a0844ae4ca558b72e2be4ee23ece79db8c0968b2684"} Dec 02 12:04:51 crc kubenswrapper[4721]: I1202 12:04:51.026468 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-69bnp" event={"ID":"9e956286-b38a-40dc-b65c-ab7395592012","Type":"ContainerStarted","Data":"dd14f3db19e82dcf172db91c3016dc0efd85af2464c03459ce79162a7c1e19a4"} Dec 02 12:04:51 crc kubenswrapper[4721]: I1202 12:04:51.026564 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:51 crc kubenswrapper[4721]: I1202 12:04:51.052726 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-69bnp" podStartSLOduration=5.570743018 podStartE2EDuration="12.052711592s" podCreationTimestamp="2025-12-02 12:04:39 +0000 UTC" firstStartedPulling="2025-12-02 12:04:40.76274829 +0000 UTC m=+810.501298639" lastFinishedPulling="2025-12-02 12:04:47.244716864 +0000 UTC m=+816.983267213" observedRunningTime="2025-12-02 12:04:51.048989048 +0000 UTC m=+820.787539407" watchObservedRunningTime="2025-12-02 12:04:51.052711592 +0000 UTC m=+820.791261941" Dec 02 12:04:51 crc kubenswrapper[4721]: I1202 12:04:51.634150 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-m4dmx" Dec 02 12:04:55 crc kubenswrapper[4721]: I1202 12:04:55.667759 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:55 crc kubenswrapper[4721]: I1202 12:04:55.702847 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 12:04:55 crc kubenswrapper[4721]: I1202 12:04:55.702930 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 12:04:55 crc kubenswrapper[4721]: I1202 12:04:55.702993 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 12:04:55 crc kubenswrapper[4721]: I1202 12:04:55.703957 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"240b7cf6f3c5851e9a4d93bbfdd59e21dcf90d1e5f54d8148006085d773e5e7b"} 
pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 12:04:55 crc kubenswrapper[4721]: I1202 12:04:55.704069 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" containerID="cri-o://240b7cf6f3c5851e9a4d93bbfdd59e21dcf90d1e5f54d8148006085d773e5e7b" gracePeriod=600 Dec 02 12:04:55 crc kubenswrapper[4721]: I1202 12:04:55.732024 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-69bnp" Dec 02 12:04:56 crc kubenswrapper[4721]: I1202 12:04:56.061626 4721 generic.go:334] "Generic (PLEG): container finished" podID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerID="240b7cf6f3c5851e9a4d93bbfdd59e21dcf90d1e5f54d8148006085d773e5e7b" exitCode=0 Dec 02 12:04:56 crc kubenswrapper[4721]: I1202 12:04:56.061665 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerDied","Data":"240b7cf6f3c5851e9a4d93bbfdd59e21dcf90d1e5f54d8148006085d773e5e7b"} Dec 02 12:04:56 crc kubenswrapper[4721]: I1202 12:04:56.061773 4721 scope.go:117] "RemoveContainer" containerID="2b5a3e04a093afd734df76e21cc539ac2697b5e275c6cc77be5a62e443752e2f" Dec 02 12:04:57 crc kubenswrapper[4721]: I1202 12:04:57.068069 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerStarted","Data":"454276a48c0c3533be2304833299320cbec4257417769197c8e6697daf1879ef"} Dec 02 12:04:58 crc kubenswrapper[4721]: I1202 12:04:58.779069 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-r7bbx"] Dec 02 12:04:58 crc kubenswrapper[4721]: I1202 12:04:58.780219 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-r7bbx" Dec 02 12:04:58 crc kubenswrapper[4721]: I1202 12:04:58.783548 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-index-dockercfg-xnd8q" Dec 02 12:04:58 crc kubenswrapper[4721]: I1202 12:04:58.785330 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 02 12:04:58 crc kubenswrapper[4721]: I1202 12:04:58.785584 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 02 12:04:58 crc kubenswrapper[4721]: I1202 12:04:58.798393 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-r7bbx"] Dec 02 12:04:58 crc kubenswrapper[4721]: I1202 12:04:58.914199 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrhsm\" (UniqueName: \"kubernetes.io/projected/f359f3b5-b22e-4c61-81f7-0e0ab08435e4-kube-api-access-zrhsm\") pod \"mariadb-operator-index-r7bbx\" (UID: \"f359f3b5-b22e-4c61-81f7-0e0ab08435e4\") " pod="openstack-operators/mariadb-operator-index-r7bbx" Dec 02 12:04:59 crc kubenswrapper[4721]: I1202 12:04:59.015123 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrhsm\" (UniqueName: \"kubernetes.io/projected/f359f3b5-b22e-4c61-81f7-0e0ab08435e4-kube-api-access-zrhsm\") pod \"mariadb-operator-index-r7bbx\" (UID: \"f359f3b5-b22e-4c61-81f7-0e0ab08435e4\") " pod="openstack-operators/mariadb-operator-index-r7bbx" Dec 02 12:04:59 crc kubenswrapper[4721]: I1202 12:04:59.032980 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrhsm\" (UniqueName: \"kubernetes.io/projected/f359f3b5-b22e-4c61-81f7-0e0ab08435e4-kube-api-access-zrhsm\") pod \"mariadb-operator-index-r7bbx\" (UID: \"f359f3b5-b22e-4c61-81f7-0e0ab08435e4\") " pod="openstack-operators/mariadb-operator-index-r7bbx" Dec 02 12:04:59 crc kubenswrapper[4721]: I1202 12:04:59.101994 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-r7bbx" Dec 02 12:04:59 crc kubenswrapper[4721]: I1202 12:04:59.276816 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-r7bbx"] Dec 02 12:04:59 crc kubenswrapper[4721]: W1202 12:04:59.293081 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf359f3b5_b22e_4c61_81f7_0e0ab08435e4.slice/crio-be843bc97d9dab78c4d216a17635cfcd8a377284db94840a47a4100f5dc7ce99 WatchSource:0}: Error finding container be843bc97d9dab78c4d216a17635cfcd8a377284db94840a47a4100f5dc7ce99: Status 404 returned error can't find the container with id be843bc97d9dab78c4d216a17635cfcd8a377284db94840a47a4100f5dc7ce99 Dec 02 12:05:00 crc kubenswrapper[4721]: I1202 12:05:00.057052 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qmwqj" Dec 02 12:05:00 crc kubenswrapper[4721]: I1202 12:05:00.092829 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-r7bbx" event={"ID":"f359f3b5-b22e-4c61-81f7-0e0ab08435e4","Type":"ContainerStarted","Data":"be843bc97d9dab78c4d216a17635cfcd8a377284db94840a47a4100f5dc7ce99"} Dec 02 12:05:00 crc kubenswrapper[4721]: I1202 12:05:00.671784 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-69bnp" Dec 02 12:05:01 crc kubenswrapper[4721]: I1202 12:05:01.564396 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-r7bbx"] Dec 02 12:05:02 crc kubenswrapper[4721]: I1202 12:05:02.174179 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-xcbcl"] Dec 02 12:05:02 crc kubenswrapper[4721]: I1202 12:05:02.174991 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-xcbcl" Dec 02 12:05:02 crc kubenswrapper[4721]: I1202 12:05:02.187812 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-xcbcl"] Dec 02 12:05:02 crc kubenswrapper[4721]: I1202 12:05:02.256232 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md7g6\" (UniqueName: \"kubernetes.io/projected/17f5a185-b59b-4fa4-9ae1-8f664d3d7a58-kube-api-access-md7g6\") pod \"mariadb-operator-index-xcbcl\" (UID: \"17f5a185-b59b-4fa4-9ae1-8f664d3d7a58\") " pod="openstack-operators/mariadb-operator-index-xcbcl" Dec 02 12:05:02 crc kubenswrapper[4721]: I1202 12:05:02.357775 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md7g6\" (UniqueName: \"kubernetes.io/projected/17f5a185-b59b-4fa4-9ae1-8f664d3d7a58-kube-api-access-md7g6\") pod \"mariadb-operator-index-xcbcl\" (UID: \"17f5a185-b59b-4fa4-9ae1-8f664d3d7a58\") " pod="openstack-operators/mariadb-operator-index-xcbcl" Dec 02 12:05:02 crc kubenswrapper[4721]: I1202 12:05:02.386541 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md7g6\" (UniqueName: \"kubernetes.io/projected/17f5a185-b59b-4fa4-9ae1-8f664d3d7a58-kube-api-access-md7g6\") pod \"mariadb-operator-index-xcbcl\" (UID: \"17f5a185-b59b-4fa4-9ae1-8f664d3d7a58\") " pod="openstack-operators/mariadb-operator-index-xcbcl" Dec 02 12:05:02 crc kubenswrapper[4721]: I1202 12:05:02.500804 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-xcbcl" Dec 02 12:05:06 crc kubenswrapper[4721]: I1202 12:05:06.060226 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-xcbcl"] Dec 02 12:05:06 crc kubenswrapper[4721]: W1202 12:05:06.067972 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod17f5a185_b59b_4fa4_9ae1_8f664d3d7a58.slice/crio-418dfd6a1b3816b8e11735fea0668f1bf57bce4cb3176d7d18dc283cf66cd07e WatchSource:0}: Error finding container 418dfd6a1b3816b8e11735fea0668f1bf57bce4cb3176d7d18dc283cf66cd07e: Status 404 returned error can't find the container with id 418dfd6a1b3816b8e11735fea0668f1bf57bce4cb3176d7d18dc283cf66cd07e Dec 02 12:05:06 crc kubenswrapper[4721]: I1202 12:05:06.126063 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-xcbcl" event={"ID":"17f5a185-b59b-4fa4-9ae1-8f664d3d7a58","Type":"ContainerStarted","Data":"418dfd6a1b3816b8e11735fea0668f1bf57bce4cb3176d7d18dc283cf66cd07e"} Dec 02 12:05:07 crc kubenswrapper[4721]: I1202 12:05:07.133439 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-r7bbx" event={"ID":"f359f3b5-b22e-4c61-81f7-0e0ab08435e4","Type":"ContainerStarted","Data":"1121e96606c0a88f2d77ef14e1a6f790e632771986e10aa9283ff6357a3b4cac"} Dec 02 12:05:07 crc kubenswrapper[4721]: I1202 12:05:07.135059 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-xcbcl" event={"ID":"17f5a185-b59b-4fa4-9ae1-8f664d3d7a58","Type":"ContainerStarted","Data":"3f1af360ba915913c138671254da62c399e4397481c2a654afefa8511bc6d56a"} Dec 02 12:05:07 crc kubenswrapper[4721]: I1202 12:05:07.133537 4721 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-operators/mariadb-operator-index-r7bbx" podUID="f359f3b5-b22e-4c61-81f7-0e0ab08435e4" containerName="registry-server" containerID="cri-o://1121e96606c0a88f2d77ef14e1a6f790e632771986e10aa9283ff6357a3b4cac" gracePeriod=2 Dec 02 12:05:07 crc kubenswrapper[4721]: I1202 12:05:07.158227 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-r7bbx" podStartSLOduration=2.442405303 podStartE2EDuration="9.158030927s" podCreationTimestamp="2025-12-02 12:04:58 +0000 UTC" firstStartedPulling="2025-12-02 12:04:59.295118052 +0000 UTC m=+829.033668411" lastFinishedPulling="2025-12-02 12:05:06.010743686 +0000 UTC m=+835.749294035" observedRunningTime="2025-12-02 12:05:07.154396655 +0000 UTC m=+836.892947014" watchObservedRunningTime="2025-12-02 12:05:07.158030927 +0000 UTC m=+836.896581316" Dec 02 12:05:07 crc kubenswrapper[4721]: I1202 12:05:07.175723 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-xcbcl" podStartSLOduration=4.578953199 podStartE2EDuration="5.17569966s" podCreationTimestamp="2025-12-02 12:05:02 +0000 UTC" firstStartedPulling="2025-12-02 12:05:06.072733043 +0000 UTC m=+835.811283392" lastFinishedPulling="2025-12-02 12:05:06.669479474 +0000 UTC m=+836.408029853" observedRunningTime="2025-12-02 12:05:07.171820731 +0000 UTC m=+836.910371080" watchObservedRunningTime="2025-12-02 12:05:07.17569966 +0000 UTC m=+836.914250019" Dec 02 12:05:07 crc kubenswrapper[4721]: I1202 12:05:07.550116 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-r7bbx" Dec 02 12:05:07 crc kubenswrapper[4721]: I1202 12:05:07.623510 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrhsm\" (UniqueName: \"kubernetes.io/projected/f359f3b5-b22e-4c61-81f7-0e0ab08435e4-kube-api-access-zrhsm\") pod \"f359f3b5-b22e-4c61-81f7-0e0ab08435e4\" (UID: \"f359f3b5-b22e-4c61-81f7-0e0ab08435e4\") " Dec 02 12:05:07 crc kubenswrapper[4721]: I1202 12:05:07.631842 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f359f3b5-b22e-4c61-81f7-0e0ab08435e4-kube-api-access-zrhsm" (OuterVolumeSpecName: "kube-api-access-zrhsm") pod "f359f3b5-b22e-4c61-81f7-0e0ab08435e4" (UID: "f359f3b5-b22e-4c61-81f7-0e0ab08435e4"). InnerVolumeSpecName "kube-api-access-zrhsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:05:07 crc kubenswrapper[4721]: I1202 12:05:07.724940 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrhsm\" (UniqueName: \"kubernetes.io/projected/f359f3b5-b22e-4c61-81f7-0e0ab08435e4-kube-api-access-zrhsm\") on node \"crc\" DevicePath \"\"" Dec 02 12:05:08 crc kubenswrapper[4721]: I1202 12:05:08.143303 4721 generic.go:334] "Generic (PLEG): container finished" podID="f359f3b5-b22e-4c61-81f7-0e0ab08435e4" containerID="1121e96606c0a88f2d77ef14e1a6f790e632771986e10aa9283ff6357a3b4cac" exitCode=0 Dec 02 12:05:08 crc kubenswrapper[4721]: I1202 12:05:08.143403 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-r7bbx" Dec 02 12:05:08 crc kubenswrapper[4721]: I1202 12:05:08.143433 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-r7bbx" event={"ID":"f359f3b5-b22e-4c61-81f7-0e0ab08435e4","Type":"ContainerDied","Data":"1121e96606c0a88f2d77ef14e1a6f790e632771986e10aa9283ff6357a3b4cac"} Dec 02 12:05:08 crc kubenswrapper[4721]: I1202 12:05:08.143506 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-r7bbx" event={"ID":"f359f3b5-b22e-4c61-81f7-0e0ab08435e4","Type":"ContainerDied","Data":"be843bc97d9dab78c4d216a17635cfcd8a377284db94840a47a4100f5dc7ce99"} Dec 02 12:05:08 crc kubenswrapper[4721]: I1202 12:05:08.143552 4721 scope.go:117] "RemoveContainer" containerID="1121e96606c0a88f2d77ef14e1a6f790e632771986e10aa9283ff6357a3b4cac" Dec 02 12:05:08 crc kubenswrapper[4721]: I1202 12:05:08.167114 4721 scope.go:117] "RemoveContainer" containerID="1121e96606c0a88f2d77ef14e1a6f790e632771986e10aa9283ff6357a3b4cac" Dec 02 12:05:08 crc kubenswrapper[4721]: E1202 12:05:08.168171 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1121e96606c0a88f2d77ef14e1a6f790e632771986e10aa9283ff6357a3b4cac\": container with ID starting with 1121e96606c0a88f2d77ef14e1a6f790e632771986e10aa9283ff6357a3b4cac not found: ID does not exist" containerID="1121e96606c0a88f2d77ef14e1a6f790e632771986e10aa9283ff6357a3b4cac" Dec 02 12:05:08 crc kubenswrapper[4721]: I1202 12:05:08.169419 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1121e96606c0a88f2d77ef14e1a6f790e632771986e10aa9283ff6357a3b4cac"} err="failed to get container status \"1121e96606c0a88f2d77ef14e1a6f790e632771986e10aa9283ff6357a3b4cac\": rpc error: code = NotFound desc = could not find container \"1121e96606c0a88f2d77ef14e1a6f790e632771986e10aa9283ff6357a3b4cac\": container with ID starting with 1121e96606c0a88f2d77ef14e1a6f790e632771986e10aa9283ff6357a3b4cac not found: ID does not exist" Dec 02 12:05:08 crc kubenswrapper[4721]: I1202 12:05:08.184484 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-r7bbx"] Dec 02 12:05:08 crc kubenswrapper[4721]: I1202 12:05:08.189991 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-r7bbx"] Dec 02 12:05:08 crc kubenswrapper[4721]: I1202 12:05:08.848564 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f359f3b5-b22e-4c61-81f7-0e0ab08435e4" path="/var/lib/kubelet/pods/f359f3b5-b22e-4c61-81f7-0e0ab08435e4/volumes" Dec 02 12:05:12 crc kubenswrapper[4721]: I1202 12:05:12.500922 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-index-xcbcl" Dec 02 12:05:12 crc kubenswrapper[4721]: I1202 12:05:12.501443 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/mariadb-operator-index-xcbcl" Dec 02 12:05:12 crc kubenswrapper[4721]: I1202 12:05:12.531313 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/mariadb-operator-index-xcbcl" Dec 02 12:05:13 crc kubenswrapper[4721]: I1202 12:05:13.202843 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-index-xcbcl" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 
12:05:19.261043 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng"] Dec 02 12:05:19 crc kubenswrapper[4721]: E1202 12:05:19.261983 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f359f3b5-b22e-4c61-81f7-0e0ab08435e4" containerName="registry-server" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.262000 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f359f3b5-b22e-4c61-81f7-0e0ab08435e4" containerName="registry-server" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.262147 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f359f3b5-b22e-4c61-81f7-0e0ab08435e4" containerName="registry-server" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.263226 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.265580 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-6fpcc" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.269727 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng"] Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.387577 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-bundle\") pod \"27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng\" (UID: \"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6\") " pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.387665 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkx86\" (UniqueName: \"kubernetes.io/projected/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-kube-api-access-bkx86\") pod \"27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng\" (UID: \"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6\") " pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.387729 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-util\") pod \"27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng\" (UID: \"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6\") " pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.488886 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkx86\" (UniqueName: \"kubernetes.io/projected/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-kube-api-access-bkx86\") pod \"27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng\" (UID: \"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6\") " pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.489119 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-util\") pod 
\"27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng\" (UID: \"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6\") " pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.489206 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-bundle\") pod \"27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng\" (UID: \"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6\") " pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.489688 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-util\") pod \"27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng\" (UID: \"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6\") " pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.489906 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-bundle\") pod \"27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng\" (UID: \"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6\") " pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.518811 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkx86\" (UniqueName: \"kubernetes.io/projected/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-kube-api-access-bkx86\") pod \"27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng\" (UID: \"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6\") " pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.626480 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" Dec 02 12:05:19 crc kubenswrapper[4721]: I1202 12:05:19.837141 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng"] Dec 02 12:05:20 crc kubenswrapper[4721]: I1202 12:05:20.217585 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" event={"ID":"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6","Type":"ContainerStarted","Data":"04c46db80b69215df574761afc309c8782f2385564ca05cc1116a577e08c5c93"} Dec 02 12:05:21 crc kubenswrapper[4721]: I1202 12:05:21.225758 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6" containerID="8867cd60b44e32998edc44d0665008f6aa6ef8d6916aaf67946423d4b7610a05" exitCode=0 Dec 02 12:05:21 crc kubenswrapper[4721]: I1202 12:05:21.225884 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" event={"ID":"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6","Type":"ContainerDied","Data":"8867cd60b44e32998edc44d0665008f6aa6ef8d6916aaf67946423d4b7610a05"} Dec 02 12:05:23 crc kubenswrapper[4721]: I1202 12:05:23.242402 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6" containerID="9e9caea58dfca76a063dd874352d9f9858dd92aded0f4e64c849be5341430382" exitCode=0 Dec 02 12:05:23 crc kubenswrapper[4721]: I1202 12:05:23.242470 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" event={"ID":"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6","Type":"ContainerDied","Data":"9e9caea58dfca76a063dd874352d9f9858dd92aded0f4e64c849be5341430382"} Dec 02 12:05:24 crc kubenswrapper[4721]: I1202 12:05:24.251245 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6" containerID="6ddf107d4c6a19d18c52a20eda9aee5c810eca972126e37d6f26ba4a18a55fbb" exitCode=0 Dec 02 12:05:24 crc kubenswrapper[4721]: I1202 12:05:24.251338 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" event={"ID":"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6","Type":"ContainerDied","Data":"6ddf107d4c6a19d18c52a20eda9aee5c810eca972126e37d6f26ba4a18a55fbb"} Dec 02 12:05:25 crc kubenswrapper[4721]: I1202 12:05:25.469963 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" Dec 02 12:05:25 crc kubenswrapper[4721]: I1202 12:05:25.572409 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-bundle\") pod \"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6\" (UID: \"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6\") " Dec 02 12:05:25 crc kubenswrapper[4721]: I1202 12:05:25.572461 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-util\") pod \"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6\" (UID: \"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6\") " Dec 02 12:05:25 crc kubenswrapper[4721]: I1202 12:05:25.572519 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkx86\" (UniqueName: \"kubernetes.io/projected/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-kube-api-access-bkx86\") pod \"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6\" (UID: \"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6\") " Dec 02 12:05:25 crc kubenswrapper[4721]: I1202 12:05:25.574555 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-bundle" (OuterVolumeSpecName: "bundle") pod "1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6" (UID: "1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:05:25 crc kubenswrapper[4721]: I1202 12:05:25.581577 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-kube-api-access-bkx86" (OuterVolumeSpecName: "kube-api-access-bkx86") pod "1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6" (UID: "1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6"). InnerVolumeSpecName "kube-api-access-bkx86". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:05:25 crc kubenswrapper[4721]: I1202 12:05:25.585273 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-util" (OuterVolumeSpecName: "util") pod "1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6" (UID: "1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:05:25 crc kubenswrapper[4721]: I1202 12:05:25.674335 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 12:05:25 crc kubenswrapper[4721]: I1202 12:05:25.674384 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-util\") on node \"crc\" DevicePath \"\"" Dec 02 12:05:25 crc kubenswrapper[4721]: I1202 12:05:25.674395 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkx86\" (UniqueName: \"kubernetes.io/projected/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6-kube-api-access-bkx86\") on node \"crc\" DevicePath \"\"" Dec 02 12:05:26 crc kubenswrapper[4721]: I1202 12:05:26.267905 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" event={"ID":"1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6","Type":"ContainerDied","Data":"04c46db80b69215df574761afc309c8782f2385564ca05cc1116a577e08c5c93"} Dec 02 12:05:26 crc kubenswrapper[4721]: I1202 12:05:26.268378 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04c46db80b69215df574761afc309c8782f2385564ca05cc1116a577e08c5c93" Dec 02 12:05:26 crc kubenswrapper[4721]: I1202 12:05:26.268018 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.290635 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5"] Dec 02 12:05:32 crc kubenswrapper[4721]: E1202 12:05:32.291413 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6" containerName="pull" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.291426 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6" containerName="pull" Dec 02 12:05:32 crc kubenswrapper[4721]: E1202 12:05:32.291435 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6" containerName="extract" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.291441 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6" containerName="extract" Dec 02 12:05:32 crc kubenswrapper[4721]: E1202 12:05:32.291456 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6" containerName="util" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.291462 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6" containerName="util" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.291551 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6" containerName="extract" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.291913 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.294310 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-service-cert" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.294956 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-fkzhq" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.296058 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.309113 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5"] Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.364503 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgtgq\" (UniqueName: \"kubernetes.io/projected/f1e8e66a-9b25-4d2a-94b7-afc09915748a-kube-api-access-mgtgq\") pod \"mariadb-operator-controller-manager-9f59d8d87-lgws5\" (UID: \"f1e8e66a-9b25-4d2a-94b7-afc09915748a\") " pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.364561 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f1e8e66a-9b25-4d2a-94b7-afc09915748a-apiservice-cert\") pod \"mariadb-operator-controller-manager-9f59d8d87-lgws5\" (UID: \"f1e8e66a-9b25-4d2a-94b7-afc09915748a\") " pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.364582 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f1e8e66a-9b25-4d2a-94b7-afc09915748a-webhook-cert\") pod \"mariadb-operator-controller-manager-9f59d8d87-lgws5\" (UID: \"f1e8e66a-9b25-4d2a-94b7-afc09915748a\") " pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.465838 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgtgq\" (UniqueName: \"kubernetes.io/projected/f1e8e66a-9b25-4d2a-94b7-afc09915748a-kube-api-access-mgtgq\") pod \"mariadb-operator-controller-manager-9f59d8d87-lgws5\" (UID: \"f1e8e66a-9b25-4d2a-94b7-afc09915748a\") " pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.465888 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f1e8e66a-9b25-4d2a-94b7-afc09915748a-apiservice-cert\") pod \"mariadb-operator-controller-manager-9f59d8d87-lgws5\" (UID: \"f1e8e66a-9b25-4d2a-94b7-afc09915748a\") " pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.465914 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f1e8e66a-9b25-4d2a-94b7-afc09915748a-webhook-cert\") pod \"mariadb-operator-controller-manager-9f59d8d87-lgws5\" (UID: \"f1e8e66a-9b25-4d2a-94b7-afc09915748a\") " 
pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.474100 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f1e8e66a-9b25-4d2a-94b7-afc09915748a-apiservice-cert\") pod \"mariadb-operator-controller-manager-9f59d8d87-lgws5\" (UID: \"f1e8e66a-9b25-4d2a-94b7-afc09915748a\") " pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.479012 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f1e8e66a-9b25-4d2a-94b7-afc09915748a-webhook-cert\") pod \"mariadb-operator-controller-manager-9f59d8d87-lgws5\" (UID: \"f1e8e66a-9b25-4d2a-94b7-afc09915748a\") " pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.486985 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgtgq\" (UniqueName: \"kubernetes.io/projected/f1e8e66a-9b25-4d2a-94b7-afc09915748a-kube-api-access-mgtgq\") pod \"mariadb-operator-controller-manager-9f59d8d87-lgws5\" (UID: \"f1e8e66a-9b25-4d2a-94b7-afc09915748a\") " pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.607168 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" Dec 02 12:05:32 crc kubenswrapper[4721]: I1202 12:05:32.888123 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5"] Dec 02 12:05:33 crc kubenswrapper[4721]: I1202 12:05:33.313842 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" event={"ID":"f1e8e66a-9b25-4d2a-94b7-afc09915748a","Type":"ContainerStarted","Data":"0769ebfc7115727c42f4d7fd2e07244ea4a4aacf322832ecface7390f3412adc"} Dec 02 12:05:37 crc kubenswrapper[4721]: I1202 12:05:37.335888 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" event={"ID":"f1e8e66a-9b25-4d2a-94b7-afc09915748a","Type":"ContainerStarted","Data":"cf75139aee6cf879db1dac81cf7e3cddfc15c4af3d05ba15dfccb09a63a2bf8f"} Dec 02 12:05:37 crc kubenswrapper[4721]: I1202 12:05:37.336562 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" Dec 02 12:05:37 crc kubenswrapper[4721]: I1202 12:05:37.357633 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" podStartSLOduration=1.567378283 podStartE2EDuration="5.357614033s" podCreationTimestamp="2025-12-02 12:05:32 +0000 UTC" firstStartedPulling="2025-12-02 12:05:32.894425986 +0000 UTC m=+862.632976335" lastFinishedPulling="2025-12-02 12:05:36.684661746 +0000 UTC m=+866.423212085" observedRunningTime="2025-12-02 12:05:37.350994669 +0000 UTC m=+867.089545008" watchObservedRunningTime="2025-12-02 12:05:37.357614033 +0000 UTC m=+867.096164392" Dec 02 12:05:42 crc kubenswrapper[4721]: I1202 12:05:42.614038 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" Dec 02 12:05:47 crc kubenswrapper[4721]: I1202 12:05:47.358836 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-hpzsg"] Dec 02 12:05:47 crc kubenswrapper[4721]: I1202 12:05:47.360220 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-hpzsg" Dec 02 12:05:47 crc kubenswrapper[4721]: I1202 12:05:47.362166 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-index-dockercfg-6xbs6" Dec 02 12:05:47 crc kubenswrapper[4721]: I1202 12:05:47.374377 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-hpzsg"] Dec 02 12:05:47 crc kubenswrapper[4721]: I1202 12:05:47.455156 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlh68\" (UniqueName: \"kubernetes.io/projected/bc87903c-e306-4c9c-a881-ed00eb454ae3-kube-api-access-rlh68\") pod \"infra-operator-index-hpzsg\" (UID: \"bc87903c-e306-4c9c-a881-ed00eb454ae3\") " pod="openstack-operators/infra-operator-index-hpzsg" Dec 02 12:05:47 crc kubenswrapper[4721]: I1202 12:05:47.556930 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlh68\" (UniqueName: \"kubernetes.io/projected/bc87903c-e306-4c9c-a881-ed00eb454ae3-kube-api-access-rlh68\") pod \"infra-operator-index-hpzsg\" (UID: \"bc87903c-e306-4c9c-a881-ed00eb454ae3\") " pod="openstack-operators/infra-operator-index-hpzsg" Dec 02 12:05:47 crc kubenswrapper[4721]: I1202 12:05:47.574997 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlh68\" (UniqueName: \"kubernetes.io/projected/bc87903c-e306-4c9c-a881-ed00eb454ae3-kube-api-access-rlh68\") pod \"infra-operator-index-hpzsg\" (UID: \"bc87903c-e306-4c9c-a881-ed00eb454ae3\") " pod="openstack-operators/infra-operator-index-hpzsg" Dec 02 12:05:47 crc kubenswrapper[4721]: I1202 12:05:47.676642 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-hpzsg" Dec 02 12:05:48 crc kubenswrapper[4721]: I1202 12:05:48.114224 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-hpzsg"] Dec 02 12:05:48 crc kubenswrapper[4721]: W1202 12:05:48.121829 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbc87903c_e306_4c9c_a881_ed00eb454ae3.slice/crio-9bfa86436c5ebe054226ba6710952f9730f15ae34d8bf7c9314f7ce3d9798995 WatchSource:0}: Error finding container 9bfa86436c5ebe054226ba6710952f9730f15ae34d8bf7c9314f7ce3d9798995: Status 404 returned error can't find the container with id 9bfa86436c5ebe054226ba6710952f9730f15ae34d8bf7c9314f7ce3d9798995 Dec 02 12:05:48 crc kubenswrapper[4721]: I1202 12:05:48.401381 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-hpzsg" event={"ID":"bc87903c-e306-4c9c-a881-ed00eb454ae3","Type":"ContainerStarted","Data":"9bfa86436c5ebe054226ba6710952f9730f15ae34d8bf7c9314f7ce3d9798995"} Dec 02 12:05:49 crc kubenswrapper[4721]: I1202 12:05:49.417864 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-hpzsg" event={"ID":"bc87903c-e306-4c9c-a881-ed00eb454ae3","Type":"ContainerStarted","Data":"ef47091654862e7ed40d72295c813354132502e344efd1f309f27d5fd0132d0f"} Dec 02 12:05:49 crc kubenswrapper[4721]: I1202 12:05:49.433983 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-hpzsg" podStartSLOduration=1.587360495 podStartE2EDuration="2.433966865s" podCreationTimestamp="2025-12-02 12:05:47 +0000 UTC" firstStartedPulling="2025-12-02 12:05:48.124471821 +0000 UTC m=+877.863022170" lastFinishedPulling="2025-12-02 12:05:48.971078201 +0000 UTC m=+878.709628540" observedRunningTime="2025-12-02 12:05:49.430416396 +0000 UTC m=+879.168966755" watchObservedRunningTime="2025-12-02 12:05:49.433966865 +0000 UTC m=+879.172517214" Dec 02 12:05:57 crc kubenswrapper[4721]: I1202 12:05:57.677404 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-index-hpzsg" Dec 02 12:05:57 crc kubenswrapper[4721]: I1202 12:05:57.678026 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/infra-operator-index-hpzsg" Dec 02 12:05:57 crc kubenswrapper[4721]: I1202 12:05:57.702908 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/infra-operator-index-hpzsg" Dec 02 12:05:58 crc kubenswrapper[4721]: I1202 12:05:58.512890 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-index-hpzsg" Dec 02 12:06:06 crc kubenswrapper[4721]: I1202 12:06:06.414980 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9"] Dec 02 12:06:06 crc kubenswrapper[4721]: I1202 12:06:06.417660 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" Dec 02 12:06:06 crc kubenswrapper[4721]: I1202 12:06:06.425829 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-6fpcc" Dec 02 12:06:06 crc kubenswrapper[4721]: I1202 12:06:06.429965 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9"] Dec 02 12:06:06 crc kubenswrapper[4721]: I1202 12:06:06.499762 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvctf\" (UniqueName: \"kubernetes.io/projected/40facf43-ee2e-42cd-b22a-ae0504c0be95-kube-api-access-rvctf\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9\" (UID: \"40facf43-ee2e-42cd-b22a-ae0504c0be95\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" Dec 02 12:06:06 crc kubenswrapper[4721]: I1202 12:06:06.499827 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/40facf43-ee2e-42cd-b22a-ae0504c0be95-util\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9\" (UID: \"40facf43-ee2e-42cd-b22a-ae0504c0be95\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" Dec 02 12:06:06 crc kubenswrapper[4721]: I1202 12:06:06.500005 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/40facf43-ee2e-42cd-b22a-ae0504c0be95-bundle\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9\" (UID: \"40facf43-ee2e-42cd-b22a-ae0504c0be95\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" Dec 02 12:06:06 crc kubenswrapper[4721]: I1202 12:06:06.601122 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvctf\" (UniqueName: \"kubernetes.io/projected/40facf43-ee2e-42cd-b22a-ae0504c0be95-kube-api-access-rvctf\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9\" (UID: \"40facf43-ee2e-42cd-b22a-ae0504c0be95\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" Dec 02 12:06:06 crc kubenswrapper[4721]: I1202 12:06:06.601481 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/40facf43-ee2e-42cd-b22a-ae0504c0be95-util\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9\" (UID: \"40facf43-ee2e-42cd-b22a-ae0504c0be95\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" Dec 02 12:06:06 crc kubenswrapper[4721]: I1202 12:06:06.601512 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/40facf43-ee2e-42cd-b22a-ae0504c0be95-bundle\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9\" (UID: \"40facf43-ee2e-42cd-b22a-ae0504c0be95\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" Dec 02 12:06:06 crc kubenswrapper[4721]: I1202 12:06:06.601946 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/40facf43-ee2e-42cd-b22a-ae0504c0be95-util\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9\" (UID: \"40facf43-ee2e-42cd-b22a-ae0504c0be95\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" Dec 02 12:06:06 crc kubenswrapper[4721]: I1202 12:06:06.602031 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/40facf43-ee2e-42cd-b22a-ae0504c0be95-bundle\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9\" (UID: \"40facf43-ee2e-42cd-b22a-ae0504c0be95\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" Dec 02 12:06:06 crc kubenswrapper[4721]: I1202 12:06:06.620270 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvctf\" (UniqueName: \"kubernetes.io/projected/40facf43-ee2e-42cd-b22a-ae0504c0be95-kube-api-access-rvctf\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9\" (UID: \"40facf43-ee2e-42cd-b22a-ae0504c0be95\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" Dec 02 12:06:06 crc kubenswrapper[4721]: I1202 12:06:06.741316 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" Dec 02 12:06:06 crc kubenswrapper[4721]: I1202 12:06:06.918264 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9"] Dec 02 12:06:07 crc kubenswrapper[4721]: I1202 12:06:07.525622 4721 generic.go:334] "Generic (PLEG): container finished" podID="40facf43-ee2e-42cd-b22a-ae0504c0be95" containerID="5f7af03addf6fbc441eb5c04f6ca014d9db4b4045a192146b05b7c1617cb20c6" exitCode=0 Dec 02 12:06:07 crc kubenswrapper[4721]: I1202 12:06:07.525665 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" event={"ID":"40facf43-ee2e-42cd-b22a-ae0504c0be95","Type":"ContainerDied","Data":"5f7af03addf6fbc441eb5c04f6ca014d9db4b4045a192146b05b7c1617cb20c6"} Dec 02 12:06:07 crc kubenswrapper[4721]: I1202 12:06:07.525693 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" event={"ID":"40facf43-ee2e-42cd-b22a-ae0504c0be95","Type":"ContainerStarted","Data":"779b6510a0cd1ba0cbe5b35cffbc9d45f866f17bea54d084716d9c6b9c380a03"} Dec 02 12:06:10 crc kubenswrapper[4721]: I1202 12:06:10.557435 4721 generic.go:334] "Generic (PLEG): container finished" podID="40facf43-ee2e-42cd-b22a-ae0504c0be95" containerID="d9aac66e1bee7ed2128bc3654eaebc452e06589349c317d9c97a90fddf5013cb" exitCode=0 Dec 02 12:06:10 crc kubenswrapper[4721]: I1202 12:06:10.557487 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" event={"ID":"40facf43-ee2e-42cd-b22a-ae0504c0be95","Type":"ContainerDied","Data":"d9aac66e1bee7ed2128bc3654eaebc452e06589349c317d9c97a90fddf5013cb"} Dec 02 12:06:11 crc kubenswrapper[4721]: I1202 12:06:11.566269 4721 generic.go:334] "Generic (PLEG): container finished" podID="40facf43-ee2e-42cd-b22a-ae0504c0be95" containerID="2325e9c54e3ce65b76d6660c16911dc3967707bea0610b71982b95a045f08a45" exitCode=0 Dec 02 12:06:11 crc kubenswrapper[4721]: I1202 12:06:11.566365 4721 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" event={"ID":"40facf43-ee2e-42cd-b22a-ae0504c0be95","Type":"ContainerDied","Data":"2325e9c54e3ce65b76d6660c16911dc3967707bea0610b71982b95a045f08a45"} Dec 02 12:06:12 crc kubenswrapper[4721]: I1202 12:06:12.814666 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" Dec 02 12:06:12 crc kubenswrapper[4721]: I1202 12:06:12.886089 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvctf\" (UniqueName: \"kubernetes.io/projected/40facf43-ee2e-42cd-b22a-ae0504c0be95-kube-api-access-rvctf\") pod \"40facf43-ee2e-42cd-b22a-ae0504c0be95\" (UID: \"40facf43-ee2e-42cd-b22a-ae0504c0be95\") " Dec 02 12:06:12 crc kubenswrapper[4721]: I1202 12:06:12.886192 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/40facf43-ee2e-42cd-b22a-ae0504c0be95-bundle\") pod \"40facf43-ee2e-42cd-b22a-ae0504c0be95\" (UID: \"40facf43-ee2e-42cd-b22a-ae0504c0be95\") " Dec 02 12:06:12 crc kubenswrapper[4721]: I1202 12:06:12.886225 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/40facf43-ee2e-42cd-b22a-ae0504c0be95-util\") pod \"40facf43-ee2e-42cd-b22a-ae0504c0be95\" (UID: \"40facf43-ee2e-42cd-b22a-ae0504c0be95\") " Dec 02 12:06:12 crc kubenswrapper[4721]: I1202 12:06:12.887117 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40facf43-ee2e-42cd-b22a-ae0504c0be95-bundle" (OuterVolumeSpecName: "bundle") pod "40facf43-ee2e-42cd-b22a-ae0504c0be95" (UID: "40facf43-ee2e-42cd-b22a-ae0504c0be95"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:06:12 crc kubenswrapper[4721]: I1202 12:06:12.892418 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40facf43-ee2e-42cd-b22a-ae0504c0be95-kube-api-access-rvctf" (OuterVolumeSpecName: "kube-api-access-rvctf") pod "40facf43-ee2e-42cd-b22a-ae0504c0be95" (UID: "40facf43-ee2e-42cd-b22a-ae0504c0be95"). InnerVolumeSpecName "kube-api-access-rvctf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:06:12 crc kubenswrapper[4721]: I1202 12:06:12.902227 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40facf43-ee2e-42cd-b22a-ae0504c0be95-util" (OuterVolumeSpecName: "util") pod "40facf43-ee2e-42cd-b22a-ae0504c0be95" (UID: "40facf43-ee2e-42cd-b22a-ae0504c0be95"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:06:12 crc kubenswrapper[4721]: I1202 12:06:12.987522 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvctf\" (UniqueName: \"kubernetes.io/projected/40facf43-ee2e-42cd-b22a-ae0504c0be95-kube-api-access-rvctf\") on node \"crc\" DevicePath \"\"" Dec 02 12:06:12 crc kubenswrapper[4721]: I1202 12:06:12.987550 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/40facf43-ee2e-42cd-b22a-ae0504c0be95-util\") on node \"crc\" DevicePath \"\"" Dec 02 12:06:12 crc kubenswrapper[4721]: I1202 12:06:12.987561 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/40facf43-ee2e-42cd-b22a-ae0504c0be95-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 12:06:13 crc kubenswrapper[4721]: I1202 12:06:13.582415 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" event={"ID":"40facf43-ee2e-42cd-b22a-ae0504c0be95","Type":"ContainerDied","Data":"779b6510a0cd1ba0cbe5b35cffbc9d45f866f17bea54d084716d9c6b9c380a03"} Dec 02 12:06:13 crc kubenswrapper[4721]: I1202 12:06:13.582937 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="779b6510a0cd1ba0cbe5b35cffbc9d45f866f17bea54d084716d9c6b9c380a03" Dec 02 12:06:13 crc kubenswrapper[4721]: I1202 12:06:13.582468 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.319600 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99"] Dec 02 12:06:21 crc kubenswrapper[4721]: E1202 12:06:21.320263 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40facf43-ee2e-42cd-b22a-ae0504c0be95" containerName="extract" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.320275 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="40facf43-ee2e-42cd-b22a-ae0504c0be95" containerName="extract" Dec 02 12:06:21 crc kubenswrapper[4721]: E1202 12:06:21.320285 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40facf43-ee2e-42cd-b22a-ae0504c0be95" containerName="pull" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.320292 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="40facf43-ee2e-42cd-b22a-ae0504c0be95" containerName="pull" Dec 02 12:06:21 crc kubenswrapper[4721]: E1202 12:06:21.320303 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40facf43-ee2e-42cd-b22a-ae0504c0be95" containerName="util" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.320309 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="40facf43-ee2e-42cd-b22a-ae0504c0be95" containerName="util" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.320416 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="40facf43-ee2e-42cd-b22a-ae0504c0be95" containerName="extract" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.320998 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.323958 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-service-cert" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.324866 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-lbsvv" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.344571 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99"] Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.409118 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5568x\" (UniqueName: \"kubernetes.io/projected/6259f8fd-9c16-4c09-8e11-213914fe35f8-kube-api-access-5568x\") pod \"infra-operator-controller-manager-77d9df5fdb-g6f99\" (UID: \"6259f8fd-9c16-4c09-8e11-213914fe35f8\") " pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.409373 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6259f8fd-9c16-4c09-8e11-213914fe35f8-apiservice-cert\") pod \"infra-operator-controller-manager-77d9df5fdb-g6f99\" (UID: \"6259f8fd-9c16-4c09-8e11-213914fe35f8\") " pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.409541 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6259f8fd-9c16-4c09-8e11-213914fe35f8-webhook-cert\") pod \"infra-operator-controller-manager-77d9df5fdb-g6f99\" (UID: \"6259f8fd-9c16-4c09-8e11-213914fe35f8\") " pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.511129 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6259f8fd-9c16-4c09-8e11-213914fe35f8-apiservice-cert\") pod \"infra-operator-controller-manager-77d9df5fdb-g6f99\" (UID: \"6259f8fd-9c16-4c09-8e11-213914fe35f8\") " pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.512698 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6259f8fd-9c16-4c09-8e11-213914fe35f8-webhook-cert\") pod \"infra-operator-controller-manager-77d9df5fdb-g6f99\" (UID: \"6259f8fd-9c16-4c09-8e11-213914fe35f8\") " pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.512860 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5568x\" (UniqueName: \"kubernetes.io/projected/6259f8fd-9c16-4c09-8e11-213914fe35f8-kube-api-access-5568x\") pod \"infra-operator-controller-manager-77d9df5fdb-g6f99\" (UID: \"6259f8fd-9c16-4c09-8e11-213914fe35f8\") " pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.517818 4721 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6259f8fd-9c16-4c09-8e11-213914fe35f8-apiservice-cert\") pod \"infra-operator-controller-manager-77d9df5fdb-g6f99\" (UID: \"6259f8fd-9c16-4c09-8e11-213914fe35f8\") " pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.520803 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6259f8fd-9c16-4c09-8e11-213914fe35f8-webhook-cert\") pod \"infra-operator-controller-manager-77d9df5fdb-g6f99\" (UID: \"6259f8fd-9c16-4c09-8e11-213914fe35f8\") " pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.528338 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5568x\" (UniqueName: \"kubernetes.io/projected/6259f8fd-9c16-4c09-8e11-213914fe35f8-kube-api-access-5568x\") pod \"infra-operator-controller-manager-77d9df5fdb-g6f99\" (UID: \"6259f8fd-9c16-4c09-8e11-213914fe35f8\") " pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.637030 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" Dec 02 12:06:21 crc kubenswrapper[4721]: I1202 12:06:21.841090 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99"] Dec 02 12:06:21 crc kubenswrapper[4721]: W1202 12:06:21.848488 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6259f8fd_9c16_4c09_8e11_213914fe35f8.slice/crio-ad163e3db09099121ed267db61b9d29929431b9e758bdebdf7692e4b66354366 WatchSource:0}: Error finding container ad163e3db09099121ed267db61b9d29929431b9e758bdebdf7692e4b66354366: Status 404 returned error can't find the container with id ad163e3db09099121ed267db61b9d29929431b9e758bdebdf7692e4b66354366 Dec 02 12:06:22 crc kubenswrapper[4721]: I1202 12:06:22.632622 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" event={"ID":"6259f8fd-9c16-4c09-8e11-213914fe35f8","Type":"ContainerStarted","Data":"ad163e3db09099121ed267db61b9d29929431b9e758bdebdf7692e4b66354366"} Dec 02 12:06:24 crc kubenswrapper[4721]: I1202 12:06:24.648701 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" event={"ID":"6259f8fd-9c16-4c09-8e11-213914fe35f8","Type":"ContainerStarted","Data":"f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e"} Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.420097 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/openstack-galera-0"] Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.421919 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.424313 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"kube-root-ca.crt" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.424802 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"openshift-service-ca.crt" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.425317 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"galera-openstack-dockercfg-p5jb4" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.427842 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"openstack-scripts" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.428161 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"openstack-config-data" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.439572 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/openstack-galera-1"] Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.440898 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.456964 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-0"] Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.460825 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/openstack-galera-2"] Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.461965 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.466588 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-1"] Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.482262 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-2"] Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.505118 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-config-data-default\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.505165 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbjst\" (UniqueName: \"kubernetes.io/projected/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-kube-api-access-fbjst\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.505191 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.505212 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.505248 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-kolla-config\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.505280 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.606880 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-kolla-config\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.606949 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-config-data-default\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.606981 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrkb8\" (UniqueName: \"kubernetes.io/projected/69d9011a-8441-4afa-904b-706f5a0ffe6c-kube-api-access-jrkb8\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607195 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-kolla-config\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607240 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-operator-scripts\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607259 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/69d9011a-8441-4afa-904b-706f5a0ffe6c-config-data-generated\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607313 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-operator-scripts\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607368 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-config-data-default\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607396 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607421 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbjst\" (UniqueName: \"kubernetes.io/projected/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-kube-api-access-fbjst\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607445 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq4ng\" (UniqueName: \"kubernetes.io/projected/91d5a64b-5c90-4fba-9e6c-416071e412b3-kube-api-access-dq4ng\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607467 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607485 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/91d5a64b-5c90-4fba-9e6c-416071e412b3-config-data-generated\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607500 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607519 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607550 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-kolla-config\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607580 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-config-data-default\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607603 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.607971 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") device mount path \"/mnt/openstack/pv04\"" pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.608714 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.609301 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-config-data-default\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.609630 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-kolla-config\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.613884 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.642786 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.645160 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbjst\" (UniqueName: \"kubernetes.io/projected/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-kube-api-access-fbjst\") pod \"openstack-galera-0\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") " 
pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.708617 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-operator-scripts\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.708660 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/69d9011a-8441-4afa-904b-706f5a0ffe6c-config-data-generated\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.708705 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-operator-scripts\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.708750 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.708769 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq4ng\" (UniqueName: \"kubernetes.io/projected/91d5a64b-5c90-4fba-9e6c-416071e412b3-kube-api-access-dq4ng\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.708911 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/91d5a64b-5c90-4fba-9e6c-416071e412b3-config-data-generated\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.708930 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.709181 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/69d9011a-8441-4afa-904b-706f5a0ffe6c-config-data-generated\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.709257 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") device mount path \"/mnt/openstack/pv09\"" pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.709534 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/91d5a64b-5c90-4fba-9e6c-416071e412b3-config-data-generated\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.710645 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-operator-scripts\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.710716 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-config-data-default\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.710754 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-kolla-config\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.710783 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-config-data-default\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.710804 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrkb8\" (UniqueName: \"kubernetes.io/projected/69d9011a-8441-4afa-904b-706f5a0ffe6c-kube-api-access-jrkb8\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.710829 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-kolla-config\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.711044 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-operator-scripts\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.711894 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-kolla-config\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.711924 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-config-data-default\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.712932 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-kolla-config\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.713255 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-config-data-default\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.715666 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") device mount path \"/mnt/openstack/pv02\"" pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.724978 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq4ng\" (UniqueName: \"kubernetes.io/projected/91d5a64b-5c90-4fba-9e6c-416071e412b3-kube-api-access-dq4ng\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.730175 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrkb8\" (UniqueName: \"kubernetes.io/projected/69d9011a-8441-4afa-904b-706f5a0ffe6c-kube-api-access-jrkb8\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.731439 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-2\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.746880 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-1\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") " pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.787663 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.797884 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:28 crc kubenswrapper[4721]: I1202 12:06:28.810776 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:29 crc kubenswrapper[4721]: I1202 12:06:29.669463 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-2"] Dec 02 12:06:29 crc kubenswrapper[4721]: I1202 12:06:29.710475 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-1"] Dec 02 12:06:29 crc kubenswrapper[4721]: I1202 12:06:29.721745 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/openstack-galera-0"] Dec 02 12:06:30 crc kubenswrapper[4721]: I1202 12:06:30.730852 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"46db9721-f25f-4725-b8ea-8eda2b7ce1b3","Type":"ContainerStarted","Data":"f055d4b2584ee31e4c4e6975535a2dbd011833a137aad79064486255acd75571"} Dec 02 12:06:30 crc kubenswrapper[4721]: I1202 12:06:30.736021 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"91d5a64b-5c90-4fba-9e6c-416071e412b3","Type":"ContainerStarted","Data":"db9a63ebc6c9f9e6ab04641bc4366bd5b06d693206076a9d1b55882a8d148bac"} Dec 02 12:06:30 crc kubenswrapper[4721]: I1202 12:06:30.739234 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" event={"ID":"6259f8fd-9c16-4c09-8e11-213914fe35f8","Type":"ContainerStarted","Data":"31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737"} Dec 02 12:06:30 crc kubenswrapper[4721]: I1202 12:06:30.739449 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" Dec 02 12:06:30 crc kubenswrapper[4721]: I1202 12:06:30.740950 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"69d9011a-8441-4afa-904b-706f5a0ffe6c","Type":"ContainerStarted","Data":"0079ead3d7edd32fe163cd52005d8e9b967f39cc1cc8621968264f8065c39bf3"} Dec 02 12:06:30 crc kubenswrapper[4721]: I1202 12:06:30.742457 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" Dec 02 12:06:30 crc kubenswrapper[4721]: I1202 12:06:30.768538 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" podStartSLOduration=1.711342615 podStartE2EDuration="9.768503532s" podCreationTimestamp="2025-12-02 12:06:21 +0000 UTC" firstStartedPulling="2025-12-02 12:06:21.850100224 +0000 UTC m=+911.588650573" lastFinishedPulling="2025-12-02 12:06:29.907261141 +0000 UTC m=+919.645811490" observedRunningTime="2025-12-02 12:06:30.763585011 +0000 UTC m=+920.502135360" watchObservedRunningTime="2025-12-02 12:06:30.768503532 +0000 UTC m=+920.507053881" Dec 02 12:06:35 crc kubenswrapper[4721]: I1202 12:06:35.934059 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/memcached-0"] Dec 02 12:06:35 crc kubenswrapper[4721]: I1202 12:06:35.935207 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/memcached-0" Dec 02 12:06:35 crc kubenswrapper[4721]: I1202 12:06:35.941802 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"memcached-memcached-dockercfg-4brt8" Dec 02 12:06:35 crc kubenswrapper[4721]: I1202 12:06:35.942238 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"memcached-config-data" Dec 02 12:06:35 crc kubenswrapper[4721]: I1202 12:06:35.952397 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/memcached-0"] Dec 02 12:06:36 crc kubenswrapper[4721]: I1202 12:06:36.009157 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-config-data\") pod \"memcached-0\" (UID: \"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93\") " pod="swift-kuttl-tests/memcached-0" Dec 02 12:06:36 crc kubenswrapper[4721]: I1202 12:06:36.009210 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rkqz\" (UniqueName: \"kubernetes.io/projected/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-kube-api-access-2rkqz\") pod \"memcached-0\" (UID: \"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93\") " pod="swift-kuttl-tests/memcached-0" Dec 02 12:06:36 crc kubenswrapper[4721]: I1202 12:06:36.009231 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-kolla-config\") pod \"memcached-0\" (UID: \"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93\") " pod="swift-kuttl-tests/memcached-0" Dec 02 12:06:36 crc kubenswrapper[4721]: I1202 12:06:36.110810 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rkqz\" (UniqueName: \"kubernetes.io/projected/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-kube-api-access-2rkqz\") pod \"memcached-0\" (UID: \"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93\") " pod="swift-kuttl-tests/memcached-0" Dec 02 12:06:36 crc kubenswrapper[4721]: I1202 12:06:36.110853 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-kolla-config\") pod \"memcached-0\" (UID: \"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93\") " pod="swift-kuttl-tests/memcached-0" Dec 02 12:06:36 crc kubenswrapper[4721]: I1202 12:06:36.110922 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-config-data\") pod \"memcached-0\" (UID: \"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93\") " pod="swift-kuttl-tests/memcached-0" Dec 02 12:06:36 crc kubenswrapper[4721]: I1202 12:06:36.111589 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-config-data\") pod \"memcached-0\" (UID: \"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93\") " pod="swift-kuttl-tests/memcached-0" Dec 02 12:06:36 crc kubenswrapper[4721]: I1202 12:06:36.112310 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-kolla-config\") pod \"memcached-0\" (UID: \"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93\") " pod="swift-kuttl-tests/memcached-0" Dec 02 12:06:36 crc 
kubenswrapper[4721]: I1202 12:06:36.157213 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rkqz\" (UniqueName: \"kubernetes.io/projected/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-kube-api-access-2rkqz\") pod \"memcached-0\" (UID: \"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93\") " pod="swift-kuttl-tests/memcached-0" Dec 02 12:06:36 crc kubenswrapper[4721]: I1202 12:06:36.267187 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/memcached-0" Dec 02 12:06:37 crc kubenswrapper[4721]: I1202 12:06:37.964657 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-s87cl"] Dec 02 12:06:37 crc kubenswrapper[4721]: I1202 12:06:37.966277 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:37 crc kubenswrapper[4721]: I1202 12:06:37.980087 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s87cl"] Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.039886 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abff05be-b826-4f09-8ede-6a30a486a9e3-utilities\") pod \"community-operators-s87cl\" (UID: \"abff05be-b826-4f09-8ede-6a30a486a9e3\") " pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.039973 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhpn2\" (UniqueName: \"kubernetes.io/projected/abff05be-b826-4f09-8ede-6a30a486a9e3-kube-api-access-xhpn2\") pod \"community-operators-s87cl\" (UID: \"abff05be-b826-4f09-8ede-6a30a486a9e3\") " pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.040036 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abff05be-b826-4f09-8ede-6a30a486a9e3-catalog-content\") pod \"community-operators-s87cl\" (UID: \"abff05be-b826-4f09-8ede-6a30a486a9e3\") " pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.141596 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abff05be-b826-4f09-8ede-6a30a486a9e3-utilities\") pod \"community-operators-s87cl\" (UID: \"abff05be-b826-4f09-8ede-6a30a486a9e3\") " pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.141945 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhpn2\" (UniqueName: \"kubernetes.io/projected/abff05be-b826-4f09-8ede-6a30a486a9e3-kube-api-access-xhpn2\") pod \"community-operators-s87cl\" (UID: \"abff05be-b826-4f09-8ede-6a30a486a9e3\") " pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.142012 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abff05be-b826-4f09-8ede-6a30a486a9e3-catalog-content\") pod \"community-operators-s87cl\" (UID: \"abff05be-b826-4f09-8ede-6a30a486a9e3\") " pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 
12:06:38.142317 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abff05be-b826-4f09-8ede-6a30a486a9e3-utilities\") pod \"community-operators-s87cl\" (UID: \"abff05be-b826-4f09-8ede-6a30a486a9e3\") " pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.142387 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abff05be-b826-4f09-8ede-6a30a486a9e3-catalog-content\") pod \"community-operators-s87cl\" (UID: \"abff05be-b826-4f09-8ede-6a30a486a9e3\") " pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.164592 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhpn2\" (UniqueName: \"kubernetes.io/projected/abff05be-b826-4f09-8ede-6a30a486a9e3-kube-api-access-xhpn2\") pod \"community-operators-s87cl\" (UID: \"abff05be-b826-4f09-8ede-6a30a486a9e3\") " pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.209272 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/memcached-0"] Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.313182 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.571631 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s87cl"] Dec 02 12:06:38 crc kubenswrapper[4721]: W1202 12:06:38.578504 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podabff05be_b826_4f09_8ede_6a30a486a9e3.slice/crio-2eb0820adedec389cbf8b88962434947f2e77ff1e616d3efc45f5b409e730691 WatchSource:0}: Error finding container 2eb0820adedec389cbf8b88962434947f2e77ff1e616d3efc45f5b409e730691: Status 404 returned error can't find the container with id 2eb0820adedec389cbf8b88962434947f2e77ff1e616d3efc45f5b409e730691 Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.791297 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/memcached-0" event={"ID":"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93","Type":"ContainerStarted","Data":"f9074cdc2b46e8b6f2aed9c0a79c0a72e71e8994593e03ea3c51431f56e2d7b9"} Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.792835 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"91d5a64b-5c90-4fba-9e6c-416071e412b3","Type":"ContainerStarted","Data":"40508839ff7c4ec3bab04ada05d57b9c0f9ff6f42340cbc4cbde8a7956279fb9"} Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.795018 4721 generic.go:334] "Generic (PLEG): container finished" podID="abff05be-b826-4f09-8ede-6a30a486a9e3" containerID="b24ae59cc72aa18af8aa320ed9133a73e5a5a48606f96932dd658da6d7ef8acd" exitCode=0 Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.795116 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s87cl" event={"ID":"abff05be-b826-4f09-8ede-6a30a486a9e3","Type":"ContainerDied","Data":"b24ae59cc72aa18af8aa320ed9133a73e5a5a48606f96932dd658da6d7ef8acd"} Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.795148 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s87cl" 
event={"ID":"abff05be-b826-4f09-8ede-6a30a486a9e3","Type":"ContainerStarted","Data":"2eb0820adedec389cbf8b88962434947f2e77ff1e616d3efc45f5b409e730691"} Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.799218 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"69d9011a-8441-4afa-904b-706f5a0ffe6c","Type":"ContainerStarted","Data":"312c29e2155e60a4937c7648fbbb3c5e46447fe6928cde19289282af63e58102"} Dec 02 12:06:38 crc kubenswrapper[4721]: I1202 12:06:38.801799 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"46db9721-f25f-4725-b8ea-8eda2b7ce1b3","Type":"ContainerStarted","Data":"db916a79f327d46a3d34c7694ea9f2b6b60fedcfe05c09e8fbdbfcea06065517"} Dec 02 12:06:39 crc kubenswrapper[4721]: I1202 12:06:39.772772 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-vf5hx"] Dec 02 12:06:39 crc kubenswrapper[4721]: I1202 12:06:39.773505 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-vf5hx" Dec 02 12:06:39 crc kubenswrapper[4721]: I1202 12:06:39.775894 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-index-dockercfg-2pssg" Dec 02 12:06:39 crc kubenswrapper[4721]: I1202 12:06:39.790988 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-vf5hx"] Dec 02 12:06:39 crc kubenswrapper[4721]: I1202 12:06:39.968272 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ccdb\" (UniqueName: \"kubernetes.io/projected/00dd52ed-18df-40f3-a396-9c4136785284-kube-api-access-5ccdb\") pod \"rabbitmq-cluster-operator-index-vf5hx\" (UID: \"00dd52ed-18df-40f3-a396-9c4136785284\") " pod="openstack-operators/rabbitmq-cluster-operator-index-vf5hx" Dec 02 12:06:40 crc kubenswrapper[4721]: I1202 12:06:40.069956 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ccdb\" (UniqueName: \"kubernetes.io/projected/00dd52ed-18df-40f3-a396-9c4136785284-kube-api-access-5ccdb\") pod \"rabbitmq-cluster-operator-index-vf5hx\" (UID: \"00dd52ed-18df-40f3-a396-9c4136785284\") " pod="openstack-operators/rabbitmq-cluster-operator-index-vf5hx" Dec 02 12:06:40 crc kubenswrapper[4721]: I1202 12:06:40.098801 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ccdb\" (UniqueName: \"kubernetes.io/projected/00dd52ed-18df-40f3-a396-9c4136785284-kube-api-access-5ccdb\") pod \"rabbitmq-cluster-operator-index-vf5hx\" (UID: \"00dd52ed-18df-40f3-a396-9c4136785284\") " pod="openstack-operators/rabbitmq-cluster-operator-index-vf5hx" Dec 02 12:06:40 crc kubenswrapper[4721]: I1202 12:06:40.152447 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-vf5hx" Dec 02 12:06:40 crc kubenswrapper[4721]: I1202 12:06:40.828280 4721 generic.go:334] "Generic (PLEG): container finished" podID="abff05be-b826-4f09-8ede-6a30a486a9e3" containerID="57dc7a2fa08b3be09412ec1b1d3baa3761e14a58ead9315171bfbb241fd79b45" exitCode=0 Dec 02 12:06:40 crc kubenswrapper[4721]: I1202 12:06:40.828440 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s87cl" event={"ID":"abff05be-b826-4f09-8ede-6a30a486a9e3","Type":"ContainerDied","Data":"57dc7a2fa08b3be09412ec1b1d3baa3761e14a58ead9315171bfbb241fd79b45"} Dec 02 12:06:40 crc kubenswrapper[4721]: I1202 12:06:40.862694 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-vf5hx"] Dec 02 12:06:41 crc kubenswrapper[4721]: I1202 12:06:41.834053 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-vf5hx" event={"ID":"00dd52ed-18df-40f3-a396-9c4136785284","Type":"ContainerStarted","Data":"ac3cdf8bdfaf18fd182b256cb61a928aec8e4e9cd49563fa31d49b865ce01a6e"} Dec 02 12:06:42 crc kubenswrapper[4721]: I1202 12:06:42.840780 4721 generic.go:334] "Generic (PLEG): container finished" podID="91d5a64b-5c90-4fba-9e6c-416071e412b3" containerID="40508839ff7c4ec3bab04ada05d57b9c0f9ff6f42340cbc4cbde8a7956279fb9" exitCode=0 Dec 02 12:06:42 crc kubenswrapper[4721]: I1202 12:06:42.845873 4721 generic.go:334] "Generic (PLEG): container finished" podID="69d9011a-8441-4afa-904b-706f5a0ffe6c" containerID="312c29e2155e60a4937c7648fbbb3c5e46447fe6928cde19289282af63e58102" exitCode=0 Dec 02 12:06:42 crc kubenswrapper[4721]: I1202 12:06:42.847508 4721 generic.go:334] "Generic (PLEG): container finished" podID="46db9721-f25f-4725-b8ea-8eda2b7ce1b3" containerID="db916a79f327d46a3d34c7694ea9f2b6b60fedcfe05c09e8fbdbfcea06065517" exitCode=0 Dec 02 12:06:42 crc kubenswrapper[4721]: I1202 12:06:42.847816 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"91d5a64b-5c90-4fba-9e6c-416071e412b3","Type":"ContainerDied","Data":"40508839ff7c4ec3bab04ada05d57b9c0f9ff6f42340cbc4cbde8a7956279fb9"} Dec 02 12:06:42 crc kubenswrapper[4721]: I1202 12:06:42.847854 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s87cl" event={"ID":"abff05be-b826-4f09-8ede-6a30a486a9e3","Type":"ContainerStarted","Data":"b2033ab25cef0c89922c91258d8c6c55549cbb351f4295a2ded8a264e79e2fcd"} Dec 02 12:06:42 crc kubenswrapper[4721]: I1202 12:06:42.847866 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"69d9011a-8441-4afa-904b-706f5a0ffe6c","Type":"ContainerDied","Data":"312c29e2155e60a4937c7648fbbb3c5e46447fe6928cde19289282af63e58102"} Dec 02 12:06:42 crc kubenswrapper[4721]: I1202 12:06:42.847876 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"46db9721-f25f-4725-b8ea-8eda2b7ce1b3","Type":"ContainerDied","Data":"db916a79f327d46a3d34c7694ea9f2b6b60fedcfe05c09e8fbdbfcea06065517"} Dec 02 12:06:42 crc kubenswrapper[4721]: I1202 12:06:42.848670 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/memcached-0" event={"ID":"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93","Type":"ContainerStarted","Data":"fdf853adc3af7a138953946a1a769d8a059b52b64d216157b4d6da2c575ce21a"} Dec 02 12:06:42 crc kubenswrapper[4721]: 
I1202 12:06:42.848771 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/memcached-0" Dec 02 12:06:42 crc kubenswrapper[4721]: I1202 12:06:42.869041 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-s87cl" podStartSLOduration=2.721936969 podStartE2EDuration="5.869022537s" podCreationTimestamp="2025-12-02 12:06:37 +0000 UTC" firstStartedPulling="2025-12-02 12:06:38.796849242 +0000 UTC m=+928.535399591" lastFinishedPulling="2025-12-02 12:06:41.94393482 +0000 UTC m=+931.682485159" observedRunningTime="2025-12-02 12:06:42.866443547 +0000 UTC m=+932.604993896" watchObservedRunningTime="2025-12-02 12:06:42.869022537 +0000 UTC m=+932.607572886" Dec 02 12:06:42 crc kubenswrapper[4721]: I1202 12:06:42.960156 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/memcached-0" podStartSLOduration=4.236295134 podStartE2EDuration="7.96013074s" podCreationTimestamp="2025-12-02 12:06:35 +0000 UTC" firstStartedPulling="2025-12-02 12:06:38.218091103 +0000 UTC m=+927.956641452" lastFinishedPulling="2025-12-02 12:06:41.941926709 +0000 UTC m=+931.680477058" observedRunningTime="2025-12-02 12:06:42.941523867 +0000 UTC m=+932.680074226" watchObservedRunningTime="2025-12-02 12:06:42.96013074 +0000 UTC m=+932.698681089" Dec 02 12:06:43 crc kubenswrapper[4721]: I1202 12:06:43.858778 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"46db9721-f25f-4725-b8ea-8eda2b7ce1b3","Type":"ContainerStarted","Data":"299da88363d9f9d45361fd72fc2bb5bd9e4c749a6d935d47ed2253c4a893d2da"} Dec 02 12:06:43 crc kubenswrapper[4721]: I1202 12:06:43.884791 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/openstack-galera-0" podStartSLOduration=8.896936399 podStartE2EDuration="16.884762322s" podCreationTimestamp="2025-12-02 12:06:27 +0000 UTC" firstStartedPulling="2025-12-02 12:06:29.87017764 +0000 UTC m=+919.608727989" lastFinishedPulling="2025-12-02 12:06:37.858003563 +0000 UTC m=+927.596553912" observedRunningTime="2025-12-02 12:06:43.882015627 +0000 UTC m=+933.620565976" watchObservedRunningTime="2025-12-02 12:06:43.884762322 +0000 UTC m=+933.623312671" Dec 02 12:06:45 crc kubenswrapper[4721]: I1202 12:06:45.353369 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-vf5hx"] Dec 02 12:06:46 crc kubenswrapper[4721]: I1202 12:06:46.157559 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-fvc2h"] Dec 02 12:06:46 crc kubenswrapper[4721]: I1202 12:06:46.158452 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" Dec 02 12:06:46 crc kubenswrapper[4721]: I1202 12:06:46.175152 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-fvc2h"] Dec 02 12:06:46 crc kubenswrapper[4721]: I1202 12:06:46.253870 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jttxk\" (UniqueName: \"kubernetes.io/projected/7381a93e-ca9b-4d2b-ae20-c110ebab577a-kube-api-access-jttxk\") pod \"rabbitmq-cluster-operator-index-fvc2h\" (UID: \"7381a93e-ca9b-4d2b-ae20-c110ebab577a\") " pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" Dec 02 12:06:46 crc kubenswrapper[4721]: I1202 12:06:46.355942 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jttxk\" (UniqueName: \"kubernetes.io/projected/7381a93e-ca9b-4d2b-ae20-c110ebab577a-kube-api-access-jttxk\") pod \"rabbitmq-cluster-operator-index-fvc2h\" (UID: \"7381a93e-ca9b-4d2b-ae20-c110ebab577a\") " pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" Dec 02 12:06:46 crc kubenswrapper[4721]: I1202 12:06:46.378023 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jttxk\" (UniqueName: \"kubernetes.io/projected/7381a93e-ca9b-4d2b-ae20-c110ebab577a-kube-api-access-jttxk\") pod \"rabbitmq-cluster-operator-index-fvc2h\" (UID: \"7381a93e-ca9b-4d2b-ae20-c110ebab577a\") " pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" Dec 02 12:06:46 crc kubenswrapper[4721]: I1202 12:06:46.474772 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.131017 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-fvc2h"] Dec 02 12:06:48 crc kubenswrapper[4721]: W1202 12:06:48.138005 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7381a93e_ca9b_4d2b_ae20_c110ebab577a.slice/crio-d8d98afd4a885f92f307460653184f953a18d406f5aa18c508631d006a83aa6e WatchSource:0}: Error finding container d8d98afd4a885f92f307460653184f953a18d406f5aa18c508631d006a83aa6e: Status 404 returned error can't find the container with id d8d98afd4a885f92f307460653184f953a18d406f5aa18c508631d006a83aa6e Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.314016 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.314384 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.357924 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.788716 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.788773 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.899164 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/rabbitmq-cluster-operator-index-vf5hx" event={"ID":"00dd52ed-18df-40f3-a396-9c4136785284","Type":"ContainerStarted","Data":"eacdbe9e67d6dc9bf300b6755b97bd2598cc4f2f4b2d68ebc8b9810c15367797"} Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.899306 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-vf5hx" podUID="00dd52ed-18df-40f3-a396-9c4136785284" containerName="registry-server" containerID="cri-o://eacdbe9e67d6dc9bf300b6755b97bd2598cc4f2f4b2d68ebc8b9810c15367797" gracePeriod=2 Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.901841 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"91d5a64b-5c90-4fba-9e6c-416071e412b3","Type":"ContainerStarted","Data":"1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09"} Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.908866 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"69d9011a-8441-4afa-904b-706f5a0ffe6c","Type":"ContainerStarted","Data":"e5f72d9edc92b63386400f754f50f6b93a0bbde29c05804cef4803633ed756bd"} Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.912145 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" event={"ID":"7381a93e-ca9b-4d2b-ae20-c110ebab577a","Type":"ContainerStarted","Data":"3d1a4dff284a80185eeed769405d41326b0cc27bbb4d82ff51c16da5c57a9ef6"} Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.912175 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" event={"ID":"7381a93e-ca9b-4d2b-ae20-c110ebab577a","Type":"ContainerStarted","Data":"d8d98afd4a885f92f307460653184f953a18d406f5aa18c508631d006a83aa6e"} Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.937415 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-vf5hx" podStartSLOduration=3.090553692 podStartE2EDuration="9.937391827s" podCreationTimestamp="2025-12-02 12:06:39 +0000 UTC" firstStartedPulling="2025-12-02 12:06:40.886553324 +0000 UTC m=+930.625103673" lastFinishedPulling="2025-12-02 12:06:47.733391459 +0000 UTC m=+937.471941808" observedRunningTime="2025-12-02 12:06:48.93162942 +0000 UTC m=+938.670179779" watchObservedRunningTime="2025-12-02 12:06:48.937391827 +0000 UTC m=+938.675942196" Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.957003 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/openstack-galera-1" podStartSLOduration=13.988684008 podStartE2EDuration="21.9569779s" podCreationTimestamp="2025-12-02 12:06:27 +0000 UTC" firstStartedPulling="2025-12-02 12:06:29.868299242 +0000 UTC m=+919.606849631" lastFinishedPulling="2025-12-02 12:06:37.836593174 +0000 UTC m=+927.575143523" observedRunningTime="2025-12-02 12:06:48.949087707 +0000 UTC m=+938.687638066" watchObservedRunningTime="2025-12-02 12:06:48.9569779 +0000 UTC m=+938.695528259" Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.969222 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" podStartSLOduration=2.466180637 podStartE2EDuration="2.969200516s" podCreationTimestamp="2025-12-02 12:06:46 +0000 UTC" firstStartedPulling="2025-12-02 12:06:48.142622961 +0000 UTC m=+937.881173310" 
lastFinishedPulling="2025-12-02 12:06:48.64564281 +0000 UTC m=+938.384193189" observedRunningTime="2025-12-02 12:06:48.964564754 +0000 UTC m=+938.703115113" watchObservedRunningTime="2025-12-02 12:06:48.969200516 +0000 UTC m=+938.707750885" Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.970062 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:48 crc kubenswrapper[4721]: I1202 12:06:48.986172 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/openstack-galera-2" podStartSLOduration=14.025141531 podStartE2EDuration="21.986154338s" podCreationTimestamp="2025-12-02 12:06:27 +0000 UTC" firstStartedPulling="2025-12-02 12:06:29.898513972 +0000 UTC m=+919.637064321" lastFinishedPulling="2025-12-02 12:06:37.859526779 +0000 UTC m=+927.598077128" observedRunningTime="2025-12-02 12:06:48.982043451 +0000 UTC m=+938.720593810" watchObservedRunningTime="2025-12-02 12:06:48.986154338 +0000 UTC m=+938.724704697" Dec 02 12:06:49 crc kubenswrapper[4721]: I1202 12:06:49.275318 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-vf5hx" Dec 02 12:06:49 crc kubenswrapper[4721]: I1202 12:06:49.298999 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ccdb\" (UniqueName: \"kubernetes.io/projected/00dd52ed-18df-40f3-a396-9c4136785284-kube-api-access-5ccdb\") pod \"00dd52ed-18df-40f3-a396-9c4136785284\" (UID: \"00dd52ed-18df-40f3-a396-9c4136785284\") " Dec 02 12:06:49 crc kubenswrapper[4721]: I1202 12:06:49.304918 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00dd52ed-18df-40f3-a396-9c4136785284-kube-api-access-5ccdb" (OuterVolumeSpecName: "kube-api-access-5ccdb") pod "00dd52ed-18df-40f3-a396-9c4136785284" (UID: "00dd52ed-18df-40f3-a396-9c4136785284"). InnerVolumeSpecName "kube-api-access-5ccdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:06:49 crc kubenswrapper[4721]: I1202 12:06:49.400259 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ccdb\" (UniqueName: \"kubernetes.io/projected/00dd52ed-18df-40f3-a396-9c4136785284-kube-api-access-5ccdb\") on node \"crc\" DevicePath \"\"" Dec 02 12:06:49 crc kubenswrapper[4721]: I1202 12:06:49.918629 4721 generic.go:334] "Generic (PLEG): container finished" podID="00dd52ed-18df-40f3-a396-9c4136785284" containerID="eacdbe9e67d6dc9bf300b6755b97bd2598cc4f2f4b2d68ebc8b9810c15367797" exitCode=0 Dec 02 12:06:49 crc kubenswrapper[4721]: I1202 12:06:49.918704 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-vf5hx" Dec 02 12:06:49 crc kubenswrapper[4721]: I1202 12:06:49.918736 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-vf5hx" event={"ID":"00dd52ed-18df-40f3-a396-9c4136785284","Type":"ContainerDied","Data":"eacdbe9e67d6dc9bf300b6755b97bd2598cc4f2f4b2d68ebc8b9810c15367797"} Dec 02 12:06:49 crc kubenswrapper[4721]: I1202 12:06:49.918805 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-vf5hx" event={"ID":"00dd52ed-18df-40f3-a396-9c4136785284","Type":"ContainerDied","Data":"ac3cdf8bdfaf18fd182b256cb61a928aec8e4e9cd49563fa31d49b865ce01a6e"} Dec 02 12:06:49 crc kubenswrapper[4721]: I1202 12:06:49.918826 4721 scope.go:117] "RemoveContainer" containerID="eacdbe9e67d6dc9bf300b6755b97bd2598cc4f2f4b2d68ebc8b9810c15367797" Dec 02 12:06:49 crc kubenswrapper[4721]: I1202 12:06:49.948122 4721 scope.go:117] "RemoveContainer" containerID="eacdbe9e67d6dc9bf300b6755b97bd2598cc4f2f4b2d68ebc8b9810c15367797" Dec 02 12:06:49 crc kubenswrapper[4721]: E1202 12:06:49.948533 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eacdbe9e67d6dc9bf300b6755b97bd2598cc4f2f4b2d68ebc8b9810c15367797\": container with ID starting with eacdbe9e67d6dc9bf300b6755b97bd2598cc4f2f4b2d68ebc8b9810c15367797 not found: ID does not exist" containerID="eacdbe9e67d6dc9bf300b6755b97bd2598cc4f2f4b2d68ebc8b9810c15367797" Dec 02 12:06:49 crc kubenswrapper[4721]: I1202 12:06:49.948562 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eacdbe9e67d6dc9bf300b6755b97bd2598cc4f2f4b2d68ebc8b9810c15367797"} err="failed to get container status \"eacdbe9e67d6dc9bf300b6755b97bd2598cc4f2f4b2d68ebc8b9810c15367797\": rpc error: code = NotFound desc = could not find container \"eacdbe9e67d6dc9bf300b6755b97bd2598cc4f2f4b2d68ebc8b9810c15367797\": container with ID starting with eacdbe9e67d6dc9bf300b6755b97bd2598cc4f2f4b2d68ebc8b9810c15367797 not found: ID does not exist" Dec 02 12:06:49 crc kubenswrapper[4721]: I1202 12:06:49.949175 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-vf5hx"] Dec 02 12:06:49 crc kubenswrapper[4721]: I1202 12:06:49.953874 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-vf5hx"] Dec 02 12:06:50 crc kubenswrapper[4721]: I1202 12:06:50.759181 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s87cl"] Dec 02 12:06:50 crc kubenswrapper[4721]: I1202 12:06:50.847699 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00dd52ed-18df-40f3-a396-9c4136785284" path="/var/lib/kubelet/pods/00dd52ed-18df-40f3-a396-9c4136785284/volumes" Dec 02 12:06:50 crc kubenswrapper[4721]: I1202 12:06:50.924721 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-s87cl" podUID="abff05be-b826-4f09-8ede-6a30a486a9e3" containerName="registry-server" containerID="cri-o://b2033ab25cef0c89922c91258d8c6c55549cbb351f4295a2ded8a264e79e2fcd" gracePeriod=2 Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.271535 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/memcached-0" Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 
12:06:51.813853 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.931276 4721 generic.go:334] "Generic (PLEG): container finished" podID="abff05be-b826-4f09-8ede-6a30a486a9e3" containerID="b2033ab25cef0c89922c91258d8c6c55549cbb351f4295a2ded8a264e79e2fcd" exitCode=0 Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.931312 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s87cl" event={"ID":"abff05be-b826-4f09-8ede-6a30a486a9e3","Type":"ContainerDied","Data":"b2033ab25cef0c89922c91258d8c6c55549cbb351f4295a2ded8a264e79e2fcd"} Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.931335 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s87cl" event={"ID":"abff05be-b826-4f09-8ede-6a30a486a9e3","Type":"ContainerDied","Data":"2eb0820adedec389cbf8b88962434947f2e77ff1e616d3efc45f5b409e730691"} Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.931368 4721 scope.go:117] "RemoveContainer" containerID="b2033ab25cef0c89922c91258d8c6c55549cbb351f4295a2ded8a264e79e2fcd" Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.931392 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s87cl" Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.937781 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abff05be-b826-4f09-8ede-6a30a486a9e3-utilities\") pod \"abff05be-b826-4f09-8ede-6a30a486a9e3\" (UID: \"abff05be-b826-4f09-8ede-6a30a486a9e3\") " Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.937824 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhpn2\" (UniqueName: \"kubernetes.io/projected/abff05be-b826-4f09-8ede-6a30a486a9e3-kube-api-access-xhpn2\") pod \"abff05be-b826-4f09-8ede-6a30a486a9e3\" (UID: \"abff05be-b826-4f09-8ede-6a30a486a9e3\") " Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.937860 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abff05be-b826-4f09-8ede-6a30a486a9e3-catalog-content\") pod \"abff05be-b826-4f09-8ede-6a30a486a9e3\" (UID: \"abff05be-b826-4f09-8ede-6a30a486a9e3\") " Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.938599 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abff05be-b826-4f09-8ede-6a30a486a9e3-utilities" (OuterVolumeSpecName: "utilities") pod "abff05be-b826-4f09-8ede-6a30a486a9e3" (UID: "abff05be-b826-4f09-8ede-6a30a486a9e3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.945686 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abff05be-b826-4f09-8ede-6a30a486a9e3-kube-api-access-xhpn2" (OuterVolumeSpecName: "kube-api-access-xhpn2") pod "abff05be-b826-4f09-8ede-6a30a486a9e3" (UID: "abff05be-b826-4f09-8ede-6a30a486a9e3"). InnerVolumeSpecName "kube-api-access-xhpn2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.952835 4721 scope.go:117] "RemoveContainer" containerID="57dc7a2fa08b3be09412ec1b1d3baa3761e14a58ead9315171bfbb241fd79b45" Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.977033 4721 scope.go:117] "RemoveContainer" containerID="b24ae59cc72aa18af8aa320ed9133a73e5a5a48606f96932dd658da6d7ef8acd" Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.991929 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abff05be-b826-4f09-8ede-6a30a486a9e3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "abff05be-b826-4f09-8ede-6a30a486a9e3" (UID: "abff05be-b826-4f09-8ede-6a30a486a9e3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.992285 4721 scope.go:117] "RemoveContainer" containerID="b2033ab25cef0c89922c91258d8c6c55549cbb351f4295a2ded8a264e79e2fcd" Dec 02 12:06:51 crc kubenswrapper[4721]: E1202 12:06:51.992631 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2033ab25cef0c89922c91258d8c6c55549cbb351f4295a2ded8a264e79e2fcd\": container with ID starting with b2033ab25cef0c89922c91258d8c6c55549cbb351f4295a2ded8a264e79e2fcd not found: ID does not exist" containerID="b2033ab25cef0c89922c91258d8c6c55549cbb351f4295a2ded8a264e79e2fcd" Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.992667 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2033ab25cef0c89922c91258d8c6c55549cbb351f4295a2ded8a264e79e2fcd"} err="failed to get container status \"b2033ab25cef0c89922c91258d8c6c55549cbb351f4295a2ded8a264e79e2fcd\": rpc error: code = NotFound desc = could not find container \"b2033ab25cef0c89922c91258d8c6c55549cbb351f4295a2ded8a264e79e2fcd\": container with ID starting with b2033ab25cef0c89922c91258d8c6c55549cbb351f4295a2ded8a264e79e2fcd not found: ID does not exist" Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.992686 4721 scope.go:117] "RemoveContainer" containerID="57dc7a2fa08b3be09412ec1b1d3baa3761e14a58ead9315171bfbb241fd79b45" Dec 02 12:06:51 crc kubenswrapper[4721]: E1202 12:06:51.993039 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57dc7a2fa08b3be09412ec1b1d3baa3761e14a58ead9315171bfbb241fd79b45\": container with ID starting with 57dc7a2fa08b3be09412ec1b1d3baa3761e14a58ead9315171bfbb241fd79b45 not found: ID does not exist" containerID="57dc7a2fa08b3be09412ec1b1d3baa3761e14a58ead9315171bfbb241fd79b45" Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.993069 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57dc7a2fa08b3be09412ec1b1d3baa3761e14a58ead9315171bfbb241fd79b45"} err="failed to get container status \"57dc7a2fa08b3be09412ec1b1d3baa3761e14a58ead9315171bfbb241fd79b45\": rpc error: code = NotFound desc = could not find container \"57dc7a2fa08b3be09412ec1b1d3baa3761e14a58ead9315171bfbb241fd79b45\": container with ID starting with 57dc7a2fa08b3be09412ec1b1d3baa3761e14a58ead9315171bfbb241fd79b45 not found: ID does not exist" Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.993090 4721 scope.go:117] "RemoveContainer" containerID="b24ae59cc72aa18af8aa320ed9133a73e5a5a48606f96932dd658da6d7ef8acd" Dec 02 12:06:51 crc kubenswrapper[4721]: 
E1202 12:06:51.993359 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b24ae59cc72aa18af8aa320ed9133a73e5a5a48606f96932dd658da6d7ef8acd\": container with ID starting with b24ae59cc72aa18af8aa320ed9133a73e5a5a48606f96932dd658da6d7ef8acd not found: ID does not exist" containerID="b24ae59cc72aa18af8aa320ed9133a73e5a5a48606f96932dd658da6d7ef8acd" Dec 02 12:06:51 crc kubenswrapper[4721]: I1202 12:06:51.993385 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b24ae59cc72aa18af8aa320ed9133a73e5a5a48606f96932dd658da6d7ef8acd"} err="failed to get container status \"b24ae59cc72aa18af8aa320ed9133a73e5a5a48606f96932dd658da6d7ef8acd\": rpc error: code = NotFound desc = could not find container \"b24ae59cc72aa18af8aa320ed9133a73e5a5a48606f96932dd658da6d7ef8acd\": container with ID starting with b24ae59cc72aa18af8aa320ed9133a73e5a5a48606f96932dd658da6d7ef8acd not found: ID does not exist" Dec 02 12:06:52 crc kubenswrapper[4721]: I1202 12:06:52.039081 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abff05be-b826-4f09-8ede-6a30a486a9e3-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 12:06:52 crc kubenswrapper[4721]: I1202 12:06:52.039121 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhpn2\" (UniqueName: \"kubernetes.io/projected/abff05be-b826-4f09-8ede-6a30a486a9e3-kube-api-access-xhpn2\") on node \"crc\" DevicePath \"\"" Dec 02 12:06:52 crc kubenswrapper[4721]: I1202 12:06:52.039136 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abff05be-b826-4f09-8ede-6a30a486a9e3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 12:06:52 crc kubenswrapper[4721]: I1202 12:06:52.265429 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s87cl"] Dec 02 12:06:52 crc kubenswrapper[4721]: I1202 12:06:52.279586 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-s87cl"] Dec 02 12:06:52 crc kubenswrapper[4721]: I1202 12:06:52.867570 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abff05be-b826-4f09-8ede-6a30a486a9e3" path="/var/lib/kubelet/pods/abff05be-b826-4f09-8ede-6a30a486a9e3/volumes" Dec 02 12:06:56 crc kubenswrapper[4721]: I1202 12:06:56.474901 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" Dec 02 12:06:56 crc kubenswrapper[4721]: I1202 12:06:56.475280 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" Dec 02 12:06:56 crc kubenswrapper[4721]: I1202 12:06:56.497456 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" Dec 02 12:06:56 crc kubenswrapper[4721]: I1202 12:06:56.980313 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" Dec 02 12:06:58 crc kubenswrapper[4721]: I1202 12:06:58.798979 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:58 crc kubenswrapper[4721]: I1202 12:06:58.799027 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:06:58 crc kubenswrapper[4721]: I1202 12:06:58.811675 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:58 crc kubenswrapper[4721]: I1202 12:06:58.811719 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:58 crc kubenswrapper[4721]: I1202 12:06:58.884588 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:59 crc kubenswrapper[4721]: I1202 12:06:59.040192 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:06:59 crc kubenswrapper[4721]: E1202 12:06:59.800058 4721 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.169:34994->38.102.83.169:38689: write tcp 38.102.83.169:34994->38.102.83.169:38689: write: broken pipe Dec 02 12:06:59 crc kubenswrapper[4721]: I1202 12:06:59.958926 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lsbzr"] Dec 02 12:06:59 crc kubenswrapper[4721]: E1202 12:06:59.959158 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00dd52ed-18df-40f3-a396-9c4136785284" containerName="registry-server" Dec 02 12:06:59 crc kubenswrapper[4721]: I1202 12:06:59.959170 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="00dd52ed-18df-40f3-a396-9c4136785284" containerName="registry-server" Dec 02 12:06:59 crc kubenswrapper[4721]: E1202 12:06:59.959180 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abff05be-b826-4f09-8ede-6a30a486a9e3" containerName="registry-server" Dec 02 12:06:59 crc kubenswrapper[4721]: I1202 12:06:59.959186 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abff05be-b826-4f09-8ede-6a30a486a9e3" containerName="registry-server" Dec 02 12:06:59 crc kubenswrapper[4721]: E1202 12:06:59.959207 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abff05be-b826-4f09-8ede-6a30a486a9e3" containerName="extract-content" Dec 02 12:06:59 crc kubenswrapper[4721]: I1202 12:06:59.959213 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abff05be-b826-4f09-8ede-6a30a486a9e3" containerName="extract-content" Dec 02 12:06:59 crc kubenswrapper[4721]: E1202 12:06:59.959225 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abff05be-b826-4f09-8ede-6a30a486a9e3" containerName="extract-utilities" Dec 02 12:06:59 crc kubenswrapper[4721]: I1202 12:06:59.959231 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abff05be-b826-4f09-8ede-6a30a486a9e3" containerName="extract-utilities" Dec 02 12:06:59 crc kubenswrapper[4721]: I1202 12:06:59.959329 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="00dd52ed-18df-40f3-a396-9c4136785284" containerName="registry-server" Dec 02 12:06:59 crc kubenswrapper[4721]: I1202 12:06:59.959358 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abff05be-b826-4f09-8ede-6a30a486a9e3" containerName="registry-server" Dec 02 12:06:59 crc kubenswrapper[4721]: I1202 12:06:59.960107 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:06:59 crc kubenswrapper[4721]: I1202 12:06:59.975941 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lsbzr"] Dec 02 12:07:00 crc kubenswrapper[4721]: I1202 12:07:00.047541 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ec2af51-71b1-4e57-b6de-f94a8f26446a-catalog-content\") pod \"certified-operators-lsbzr\" (UID: \"5ec2af51-71b1-4e57-b6de-f94a8f26446a\") " pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:00 crc kubenswrapper[4721]: I1202 12:07:00.047877 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drzfw\" (UniqueName: \"kubernetes.io/projected/5ec2af51-71b1-4e57-b6de-f94a8f26446a-kube-api-access-drzfw\") pod \"certified-operators-lsbzr\" (UID: \"5ec2af51-71b1-4e57-b6de-f94a8f26446a\") " pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:00 crc kubenswrapper[4721]: I1202 12:07:00.047926 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ec2af51-71b1-4e57-b6de-f94a8f26446a-utilities\") pod \"certified-operators-lsbzr\" (UID: \"5ec2af51-71b1-4e57-b6de-f94a8f26446a\") " pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:00 crc kubenswrapper[4721]: I1202 12:07:00.148839 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drzfw\" (UniqueName: \"kubernetes.io/projected/5ec2af51-71b1-4e57-b6de-f94a8f26446a-kube-api-access-drzfw\") pod \"certified-operators-lsbzr\" (UID: \"5ec2af51-71b1-4e57-b6de-f94a8f26446a\") " pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:00 crc kubenswrapper[4721]: I1202 12:07:00.148938 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ec2af51-71b1-4e57-b6de-f94a8f26446a-utilities\") pod \"certified-operators-lsbzr\" (UID: \"5ec2af51-71b1-4e57-b6de-f94a8f26446a\") " pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:00 crc kubenswrapper[4721]: I1202 12:07:00.148994 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ec2af51-71b1-4e57-b6de-f94a8f26446a-catalog-content\") pod \"certified-operators-lsbzr\" (UID: \"5ec2af51-71b1-4e57-b6de-f94a8f26446a\") " pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:00 crc kubenswrapper[4721]: I1202 12:07:00.149500 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ec2af51-71b1-4e57-b6de-f94a8f26446a-utilities\") pod \"certified-operators-lsbzr\" (UID: \"5ec2af51-71b1-4e57-b6de-f94a8f26446a\") " pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:00 crc kubenswrapper[4721]: I1202 12:07:00.149536 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ec2af51-71b1-4e57-b6de-f94a8f26446a-catalog-content\") pod \"certified-operators-lsbzr\" (UID: \"5ec2af51-71b1-4e57-b6de-f94a8f26446a\") " pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:00 crc kubenswrapper[4721]: I1202 12:07:00.173529 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-drzfw\" (UniqueName: \"kubernetes.io/projected/5ec2af51-71b1-4e57-b6de-f94a8f26446a-kube-api-access-drzfw\") pod \"certified-operators-lsbzr\" (UID: \"5ec2af51-71b1-4e57-b6de-f94a8f26446a\") " pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:00 crc kubenswrapper[4721]: I1202 12:07:00.279096 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:00 crc kubenswrapper[4721]: I1202 12:07:00.706881 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lsbzr"] Dec 02 12:07:00 crc kubenswrapper[4721]: W1202 12:07:00.708978 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ec2af51_71b1_4e57_b6de_f94a8f26446a.slice/crio-23c0796f86386831e2084f7f9315f76d3d1727260df4676f5be31d53e9a01574 WatchSource:0}: Error finding container 23c0796f86386831e2084f7f9315f76d3d1727260df4676f5be31d53e9a01574: Status 404 returned error can't find the container with id 23c0796f86386831e2084f7f9315f76d3d1727260df4676f5be31d53e9a01574 Dec 02 12:07:00 crc kubenswrapper[4721]: I1202 12:07:00.984194 4721 generic.go:334] "Generic (PLEG): container finished" podID="5ec2af51-71b1-4e57-b6de-f94a8f26446a" containerID="ee4aa92f3caf123ac64427e1007d4abed61dc5a5d97777a251391f06d6cd023e" exitCode=0 Dec 02 12:07:00 crc kubenswrapper[4721]: I1202 12:07:00.984253 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lsbzr" event={"ID":"5ec2af51-71b1-4e57-b6de-f94a8f26446a","Type":"ContainerDied","Data":"ee4aa92f3caf123ac64427e1007d4abed61dc5a5d97777a251391f06d6cd023e"} Dec 02 12:07:00 crc kubenswrapper[4721]: I1202 12:07:00.984288 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lsbzr" event={"ID":"5ec2af51-71b1-4e57-b6de-f94a8f26446a","Type":"ContainerStarted","Data":"23c0796f86386831e2084f7f9315f76d3d1727260df4676f5be31d53e9a01574"} Dec 02 12:07:01 crc kubenswrapper[4721]: I1202 12:07:01.991048 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lsbzr" event={"ID":"5ec2af51-71b1-4e57-b6de-f94a8f26446a","Type":"ContainerStarted","Data":"b467cdd8314e72ae1900de106ebb7ed7e67d04c429158a05fb731b76af75c032"} Dec 02 12:07:02 crc kubenswrapper[4721]: I1202 12:07:02.997493 4721 generic.go:334] "Generic (PLEG): container finished" podID="5ec2af51-71b1-4e57-b6de-f94a8f26446a" containerID="b467cdd8314e72ae1900de106ebb7ed7e67d04c429158a05fb731b76af75c032" exitCode=0 Dec 02 12:07:02 crc kubenswrapper[4721]: I1202 12:07:02.997538 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lsbzr" event={"ID":"5ec2af51-71b1-4e57-b6de-f94a8f26446a","Type":"ContainerDied","Data":"b467cdd8314e72ae1900de106ebb7ed7e67d04c429158a05fb731b76af75c032"} Dec 02 12:07:04 crc kubenswrapper[4721]: I1202 12:07:04.025898 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lsbzr" event={"ID":"5ec2af51-71b1-4e57-b6de-f94a8f26446a","Type":"ContainerStarted","Data":"02cf8d2b4730056592bfb0ec6f22de2c4c76bf611a97a9a03a08acd8fa7e21fe"} Dec 02 12:07:04 crc kubenswrapper[4721]: I1202 12:07:04.052699 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lsbzr" 
podStartSLOduration=2.291971009 podStartE2EDuration="5.052682029s" podCreationTimestamp="2025-12-02 12:06:59 +0000 UTC" firstStartedPulling="2025-12-02 12:07:00.987099098 +0000 UTC m=+950.725649467" lastFinishedPulling="2025-12-02 12:07:03.747810138 +0000 UTC m=+953.486360487" observedRunningTime="2025-12-02 12:07:04.046845329 +0000 UTC m=+953.785395688" watchObservedRunningTime="2025-12-02 12:07:04.052682029 +0000 UTC m=+953.791232388" Dec 02 12:07:09 crc kubenswrapper[4721]: I1202 12:07:09.380487 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:07:09 crc kubenswrapper[4721]: I1202 12:07:09.451519 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/openstack-galera-0" Dec 02 12:07:10 crc kubenswrapper[4721]: I1202 12:07:10.279783 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:10 crc kubenswrapper[4721]: I1202 12:07:10.279834 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:10 crc kubenswrapper[4721]: I1202 12:07:10.318519 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:11 crc kubenswrapper[4721]: I1202 12:07:11.101574 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:11 crc kubenswrapper[4721]: I1202 12:07:11.813060 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll"] Dec 02 12:07:11 crc kubenswrapper[4721]: I1202 12:07:11.815408 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" Dec 02 12:07:11 crc kubenswrapper[4721]: I1202 12:07:11.825094 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-6fpcc" Dec 02 12:07:11 crc kubenswrapper[4721]: I1202 12:07:11.826072 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll"] Dec 02 12:07:11 crc kubenswrapper[4721]: I1202 12:07:11.903886 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vpkh\" (UniqueName: \"kubernetes.io/projected/b8097003-f4f3-4ca3-9b65-118f447c3d48-kube-api-access-2vpkh\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll\" (UID: \"b8097003-f4f3-4ca3-9b65-118f447c3d48\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" Dec 02 12:07:11 crc kubenswrapper[4721]: I1202 12:07:11.903950 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b8097003-f4f3-4ca3-9b65-118f447c3d48-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll\" (UID: \"b8097003-f4f3-4ca3-9b65-118f447c3d48\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" Dec 02 12:07:11 crc kubenswrapper[4721]: I1202 12:07:11.903976 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b8097003-f4f3-4ca3-9b65-118f447c3d48-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll\" (UID: \"b8097003-f4f3-4ca3-9b65-118f447c3d48\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" Dec 02 12:07:12 crc kubenswrapper[4721]: I1202 12:07:12.004966 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vpkh\" (UniqueName: \"kubernetes.io/projected/b8097003-f4f3-4ca3-9b65-118f447c3d48-kube-api-access-2vpkh\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll\" (UID: \"b8097003-f4f3-4ca3-9b65-118f447c3d48\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" Dec 02 12:07:12 crc kubenswrapper[4721]: I1202 12:07:12.005036 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b8097003-f4f3-4ca3-9b65-118f447c3d48-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll\" (UID: \"b8097003-f4f3-4ca3-9b65-118f447c3d48\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" Dec 02 12:07:12 crc kubenswrapper[4721]: I1202 12:07:12.005063 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b8097003-f4f3-4ca3-9b65-118f447c3d48-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll\" (UID: \"b8097003-f4f3-4ca3-9b65-118f447c3d48\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" Dec 02 12:07:12 crc kubenswrapper[4721]: I1202 12:07:12.005582 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/b8097003-f4f3-4ca3-9b65-118f447c3d48-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll\" (UID: \"b8097003-f4f3-4ca3-9b65-118f447c3d48\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" Dec 02 12:07:12 crc kubenswrapper[4721]: I1202 12:07:12.005845 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b8097003-f4f3-4ca3-9b65-118f447c3d48-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll\" (UID: \"b8097003-f4f3-4ca3-9b65-118f447c3d48\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" Dec 02 12:07:12 crc kubenswrapper[4721]: I1202 12:07:12.023190 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vpkh\" (UniqueName: \"kubernetes.io/projected/b8097003-f4f3-4ca3-9b65-118f447c3d48-kube-api-access-2vpkh\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll\" (UID: \"b8097003-f4f3-4ca3-9b65-118f447c3d48\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" Dec 02 12:07:12 crc kubenswrapper[4721]: I1202 12:07:12.133712 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" Dec 02 12:07:12 crc kubenswrapper[4721]: I1202 12:07:12.402460 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll"] Dec 02 12:07:12 crc kubenswrapper[4721]: W1202 12:07:12.410579 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8097003_f4f3_4ca3_9b65_118f447c3d48.slice/crio-97b0c2e3c56b4c6a9745da050c563443ca61719d0adc3dd479f23b7934fa1d5b WatchSource:0}: Error finding container 97b0c2e3c56b4c6a9745da050c563443ca61719d0adc3dd479f23b7934fa1d5b: Status 404 returned error can't find the container with id 97b0c2e3c56b4c6a9745da050c563443ca61719d0adc3dd479f23b7934fa1d5b Dec 02 12:07:12 crc kubenswrapper[4721]: I1202 12:07:12.791833 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:07:12 crc kubenswrapper[4721]: I1202 12:07:12.856422 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/openstack-galera-1" Dec 02 12:07:13 crc kubenswrapper[4721]: I1202 12:07:13.077656 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8097003-f4f3-4ca3-9b65-118f447c3d48" containerID="0d43e9d61360f9b06a89842cd1d4e906cf50c0bebfbf635c357ad4c1ad7578cd" exitCode=0 Dec 02 12:07:13 crc kubenswrapper[4721]: I1202 12:07:13.077721 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" event={"ID":"b8097003-f4f3-4ca3-9b65-118f447c3d48","Type":"ContainerDied","Data":"0d43e9d61360f9b06a89842cd1d4e906cf50c0bebfbf635c357ad4c1ad7578cd"} Dec 02 12:07:13 crc kubenswrapper[4721]: I1202 12:07:13.077767 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" event={"ID":"b8097003-f4f3-4ca3-9b65-118f447c3d48","Type":"ContainerStarted","Data":"97b0c2e3c56b4c6a9745da050c563443ca61719d0adc3dd479f23b7934fa1d5b"} Dec 02 12:07:15 crc kubenswrapper[4721]: 
I1202 12:07:15.090993 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8097003-f4f3-4ca3-9b65-118f447c3d48" containerID="8b0f2704fb9f94f27d0ac560e8f3865dc7642a1105cb915ddbe379c0c7a1723b" exitCode=0 Dec 02 12:07:15 crc kubenswrapper[4721]: I1202 12:07:15.091047 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" event={"ID":"b8097003-f4f3-4ca3-9b65-118f447c3d48","Type":"ContainerDied","Data":"8b0f2704fb9f94f27d0ac560e8f3865dc7642a1105cb915ddbe379c0c7a1723b"} Dec 02 12:07:15 crc kubenswrapper[4721]: I1202 12:07:15.158773 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lsbzr"] Dec 02 12:07:15 crc kubenswrapper[4721]: I1202 12:07:15.159046 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lsbzr" podUID="5ec2af51-71b1-4e57-b6de-f94a8f26446a" containerName="registry-server" containerID="cri-o://02cf8d2b4730056592bfb0ec6f22de2c4c76bf611a97a9a03a08acd8fa7e21fe" gracePeriod=2 Dec 02 12:07:15 crc kubenswrapper[4721]: I1202 12:07:15.578105 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:15 crc kubenswrapper[4721]: I1202 12:07:15.752885 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ec2af51-71b1-4e57-b6de-f94a8f26446a-utilities\") pod \"5ec2af51-71b1-4e57-b6de-f94a8f26446a\" (UID: \"5ec2af51-71b1-4e57-b6de-f94a8f26446a\") " Dec 02 12:07:15 crc kubenswrapper[4721]: I1202 12:07:15.753240 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drzfw\" (UniqueName: \"kubernetes.io/projected/5ec2af51-71b1-4e57-b6de-f94a8f26446a-kube-api-access-drzfw\") pod \"5ec2af51-71b1-4e57-b6de-f94a8f26446a\" (UID: \"5ec2af51-71b1-4e57-b6de-f94a8f26446a\") " Dec 02 12:07:15 crc kubenswrapper[4721]: I1202 12:07:15.753378 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ec2af51-71b1-4e57-b6de-f94a8f26446a-catalog-content\") pod \"5ec2af51-71b1-4e57-b6de-f94a8f26446a\" (UID: \"5ec2af51-71b1-4e57-b6de-f94a8f26446a\") " Dec 02 12:07:15 crc kubenswrapper[4721]: I1202 12:07:15.753692 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ec2af51-71b1-4e57-b6de-f94a8f26446a-utilities" (OuterVolumeSpecName: "utilities") pod "5ec2af51-71b1-4e57-b6de-f94a8f26446a" (UID: "5ec2af51-71b1-4e57-b6de-f94a8f26446a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:07:15 crc kubenswrapper[4721]: I1202 12:07:15.761533 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ec2af51-71b1-4e57-b6de-f94a8f26446a-kube-api-access-drzfw" (OuterVolumeSpecName: "kube-api-access-drzfw") pod "5ec2af51-71b1-4e57-b6de-f94a8f26446a" (UID: "5ec2af51-71b1-4e57-b6de-f94a8f26446a"). InnerVolumeSpecName "kube-api-access-drzfw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:07:15 crc kubenswrapper[4721]: I1202 12:07:15.799337 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ec2af51-71b1-4e57-b6de-f94a8f26446a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5ec2af51-71b1-4e57-b6de-f94a8f26446a" (UID: "5ec2af51-71b1-4e57-b6de-f94a8f26446a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:07:15 crc kubenswrapper[4721]: I1202 12:07:15.854539 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ec2af51-71b1-4e57-b6de-f94a8f26446a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 12:07:15 crc kubenswrapper[4721]: I1202 12:07:15.854574 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ec2af51-71b1-4e57-b6de-f94a8f26446a-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 12:07:15 crc kubenswrapper[4721]: I1202 12:07:15.854585 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drzfw\" (UniqueName: \"kubernetes.io/projected/5ec2af51-71b1-4e57-b6de-f94a8f26446a-kube-api-access-drzfw\") on node \"crc\" DevicePath \"\"" Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.100625 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8097003-f4f3-4ca3-9b65-118f447c3d48" containerID="6b07c8c1ef6d3a5e2c6bd5e43e3d99bde1cda9436d1c37bf2ddab883430dc9d2" exitCode=0 Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.100714 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" event={"ID":"b8097003-f4f3-4ca3-9b65-118f447c3d48","Type":"ContainerDied","Data":"6b07c8c1ef6d3a5e2c6bd5e43e3d99bde1cda9436d1c37bf2ddab883430dc9d2"} Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.103869 4721 generic.go:334] "Generic (PLEG): container finished" podID="5ec2af51-71b1-4e57-b6de-f94a8f26446a" containerID="02cf8d2b4730056592bfb0ec6f22de2c4c76bf611a97a9a03a08acd8fa7e21fe" exitCode=0 Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.103922 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lsbzr" Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.103931 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lsbzr" event={"ID":"5ec2af51-71b1-4e57-b6de-f94a8f26446a","Type":"ContainerDied","Data":"02cf8d2b4730056592bfb0ec6f22de2c4c76bf611a97a9a03a08acd8fa7e21fe"} Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.103971 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lsbzr" event={"ID":"5ec2af51-71b1-4e57-b6de-f94a8f26446a","Type":"ContainerDied","Data":"23c0796f86386831e2084f7f9315f76d3d1727260df4676f5be31d53e9a01574"} Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.104003 4721 scope.go:117] "RemoveContainer" containerID="02cf8d2b4730056592bfb0ec6f22de2c4c76bf611a97a9a03a08acd8fa7e21fe" Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.131205 4721 scope.go:117] "RemoveContainer" containerID="b467cdd8314e72ae1900de106ebb7ed7e67d04c429158a05fb731b76af75c032" Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.140706 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lsbzr"] Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.146056 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lsbzr"] Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.159794 4721 scope.go:117] "RemoveContainer" containerID="ee4aa92f3caf123ac64427e1007d4abed61dc5a5d97777a251391f06d6cd023e" Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.187787 4721 scope.go:117] "RemoveContainer" containerID="02cf8d2b4730056592bfb0ec6f22de2c4c76bf611a97a9a03a08acd8fa7e21fe" Dec 02 12:07:16 crc kubenswrapper[4721]: E1202 12:07:16.188162 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02cf8d2b4730056592bfb0ec6f22de2c4c76bf611a97a9a03a08acd8fa7e21fe\": container with ID starting with 02cf8d2b4730056592bfb0ec6f22de2c4c76bf611a97a9a03a08acd8fa7e21fe not found: ID does not exist" containerID="02cf8d2b4730056592bfb0ec6f22de2c4c76bf611a97a9a03a08acd8fa7e21fe" Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.188185 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02cf8d2b4730056592bfb0ec6f22de2c4c76bf611a97a9a03a08acd8fa7e21fe"} err="failed to get container status \"02cf8d2b4730056592bfb0ec6f22de2c4c76bf611a97a9a03a08acd8fa7e21fe\": rpc error: code = NotFound desc = could not find container \"02cf8d2b4730056592bfb0ec6f22de2c4c76bf611a97a9a03a08acd8fa7e21fe\": container with ID starting with 02cf8d2b4730056592bfb0ec6f22de2c4c76bf611a97a9a03a08acd8fa7e21fe not found: ID does not exist" Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.188206 4721 scope.go:117] "RemoveContainer" containerID="b467cdd8314e72ae1900de106ebb7ed7e67d04c429158a05fb731b76af75c032" Dec 02 12:07:16 crc kubenswrapper[4721]: E1202 12:07:16.188575 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b467cdd8314e72ae1900de106ebb7ed7e67d04c429158a05fb731b76af75c032\": container with ID starting with b467cdd8314e72ae1900de106ebb7ed7e67d04c429158a05fb731b76af75c032 not found: ID does not exist" containerID="b467cdd8314e72ae1900de106ebb7ed7e67d04c429158a05fb731b76af75c032" Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.188591 4721 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b467cdd8314e72ae1900de106ebb7ed7e67d04c429158a05fb731b76af75c032"} err="failed to get container status \"b467cdd8314e72ae1900de106ebb7ed7e67d04c429158a05fb731b76af75c032\": rpc error: code = NotFound desc = could not find container \"b467cdd8314e72ae1900de106ebb7ed7e67d04c429158a05fb731b76af75c032\": container with ID starting with b467cdd8314e72ae1900de106ebb7ed7e67d04c429158a05fb731b76af75c032 not found: ID does not exist" Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.188603 4721 scope.go:117] "RemoveContainer" containerID="ee4aa92f3caf123ac64427e1007d4abed61dc5a5d97777a251391f06d6cd023e" Dec 02 12:07:16 crc kubenswrapper[4721]: E1202 12:07:16.188906 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee4aa92f3caf123ac64427e1007d4abed61dc5a5d97777a251391f06d6cd023e\": container with ID starting with ee4aa92f3caf123ac64427e1007d4abed61dc5a5d97777a251391f06d6cd023e not found: ID does not exist" containerID="ee4aa92f3caf123ac64427e1007d4abed61dc5a5d97777a251391f06d6cd023e" Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.188920 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee4aa92f3caf123ac64427e1007d4abed61dc5a5d97777a251391f06d6cd023e"} err="failed to get container status \"ee4aa92f3caf123ac64427e1007d4abed61dc5a5d97777a251391f06d6cd023e\": rpc error: code = NotFound desc = could not find container \"ee4aa92f3caf123ac64427e1007d4abed61dc5a5d97777a251391f06d6cd023e\": container with ID starting with ee4aa92f3caf123ac64427e1007d4abed61dc5a5d97777a251391f06d6cd023e not found: ID does not exist" Dec 02 12:07:16 crc kubenswrapper[4721]: I1202 12:07:16.850979 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ec2af51-71b1-4e57-b6de-f94a8f26446a" path="/var/lib/kubelet/pods/5ec2af51-71b1-4e57-b6de-f94a8f26446a/volumes" Dec 02 12:07:17 crc kubenswrapper[4721]: I1202 12:07:17.459554 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" Dec 02 12:07:17 crc kubenswrapper[4721]: I1202 12:07:17.581950 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b8097003-f4f3-4ca3-9b65-118f447c3d48-util\") pod \"b8097003-f4f3-4ca3-9b65-118f447c3d48\" (UID: \"b8097003-f4f3-4ca3-9b65-118f447c3d48\") " Dec 02 12:07:17 crc kubenswrapper[4721]: I1202 12:07:17.582029 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vpkh\" (UniqueName: \"kubernetes.io/projected/b8097003-f4f3-4ca3-9b65-118f447c3d48-kube-api-access-2vpkh\") pod \"b8097003-f4f3-4ca3-9b65-118f447c3d48\" (UID: \"b8097003-f4f3-4ca3-9b65-118f447c3d48\") " Dec 02 12:07:17 crc kubenswrapper[4721]: I1202 12:07:17.582177 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b8097003-f4f3-4ca3-9b65-118f447c3d48-bundle\") pod \"b8097003-f4f3-4ca3-9b65-118f447c3d48\" (UID: \"b8097003-f4f3-4ca3-9b65-118f447c3d48\") " Dec 02 12:07:17 crc kubenswrapper[4721]: I1202 12:07:17.583295 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8097003-f4f3-4ca3-9b65-118f447c3d48-bundle" (OuterVolumeSpecName: "bundle") pod "b8097003-f4f3-4ca3-9b65-118f447c3d48" (UID: "b8097003-f4f3-4ca3-9b65-118f447c3d48"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:07:17 crc kubenswrapper[4721]: I1202 12:07:17.595139 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8097003-f4f3-4ca3-9b65-118f447c3d48-kube-api-access-2vpkh" (OuterVolumeSpecName: "kube-api-access-2vpkh") pod "b8097003-f4f3-4ca3-9b65-118f447c3d48" (UID: "b8097003-f4f3-4ca3-9b65-118f447c3d48"). InnerVolumeSpecName "kube-api-access-2vpkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:07:17 crc kubenswrapper[4721]: I1202 12:07:17.596921 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8097003-f4f3-4ca3-9b65-118f447c3d48-util" (OuterVolumeSpecName: "util") pod "b8097003-f4f3-4ca3-9b65-118f447c3d48" (UID: "b8097003-f4f3-4ca3-9b65-118f447c3d48"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:07:17 crc kubenswrapper[4721]: I1202 12:07:17.684522 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b8097003-f4f3-4ca3-9b65-118f447c3d48-util\") on node \"crc\" DevicePath \"\"" Dec 02 12:07:17 crc kubenswrapper[4721]: I1202 12:07:17.684568 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vpkh\" (UniqueName: \"kubernetes.io/projected/b8097003-f4f3-4ca3-9b65-118f447c3d48-kube-api-access-2vpkh\") on node \"crc\" DevicePath \"\"" Dec 02 12:07:17 crc kubenswrapper[4721]: I1202 12:07:17.684592 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b8097003-f4f3-4ca3-9b65-118f447c3d48-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 12:07:18 crc kubenswrapper[4721]: I1202 12:07:18.122278 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" event={"ID":"b8097003-f4f3-4ca3-9b65-118f447c3d48","Type":"ContainerDied","Data":"97b0c2e3c56b4c6a9745da050c563443ca61719d0adc3dd479f23b7934fa1d5b"} Dec 02 12:07:18 crc kubenswrapper[4721]: I1202 12:07:18.122326 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97b0c2e3c56b4c6a9745da050c563443ca61719d0adc3dd479f23b7934fa1d5b" Dec 02 12:07:18 crc kubenswrapper[4721]: I1202 12:07:18.122329 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll" Dec 02 12:07:25 crc kubenswrapper[4721]: I1202 12:07:25.702677 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 12:07:25 crc kubenswrapper[4721]: I1202 12:07:25.703293 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.450678 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg"] Dec 02 12:07:26 crc kubenswrapper[4721]: E1202 12:07:26.451037 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec2af51-71b1-4e57-b6de-f94a8f26446a" containerName="extract-utilities" Dec 02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.451063 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec2af51-71b1-4e57-b6de-f94a8f26446a" containerName="extract-utilities" Dec 02 12:07:26 crc kubenswrapper[4721]: E1202 12:07:26.451087 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8097003-f4f3-4ca3-9b65-118f447c3d48" containerName="pull" Dec 02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.451101 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8097003-f4f3-4ca3-9b65-118f447c3d48" containerName="pull" Dec 02 12:07:26 crc kubenswrapper[4721]: E1202 12:07:26.451124 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8097003-f4f3-4ca3-9b65-118f447c3d48" containerName="util" Dec 
02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.451138 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8097003-f4f3-4ca3-9b65-118f447c3d48" containerName="util" Dec 02 12:07:26 crc kubenswrapper[4721]: E1202 12:07:26.451162 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8097003-f4f3-4ca3-9b65-118f447c3d48" containerName="extract" Dec 02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.451174 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8097003-f4f3-4ca3-9b65-118f447c3d48" containerName="extract" Dec 02 12:07:26 crc kubenswrapper[4721]: E1202 12:07:26.451191 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec2af51-71b1-4e57-b6de-f94a8f26446a" containerName="registry-server" Dec 02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.451203 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec2af51-71b1-4e57-b6de-f94a8f26446a" containerName="registry-server" Dec 02 12:07:26 crc kubenswrapper[4721]: E1202 12:07:26.451226 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ec2af51-71b1-4e57-b6de-f94a8f26446a" containerName="extract-content" Dec 02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.451240 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ec2af51-71b1-4e57-b6de-f94a8f26446a" containerName="extract-content" Dec 02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.451461 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ec2af51-71b1-4e57-b6de-f94a8f26446a" containerName="registry-server" Dec 02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.451483 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8097003-f4f3-4ca3-9b65-118f447c3d48" containerName="extract" Dec 02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.452219 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg" Dec 02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.456271 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-dockercfg-77vn4" Dec 02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.459684 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg"] Dec 02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.607333 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvzjx\" (UniqueName: \"kubernetes.io/projected/446fee83-175f-44a1-b4a5-24f8150e2d43-kube-api-access-zvzjx\") pod \"rabbitmq-cluster-operator-779fc9694b-wgmmg\" (UID: \"446fee83-175f-44a1-b4a5-24f8150e2d43\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg" Dec 02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.708982 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvzjx\" (UniqueName: \"kubernetes.io/projected/446fee83-175f-44a1-b4a5-24f8150e2d43-kube-api-access-zvzjx\") pod \"rabbitmq-cluster-operator-779fc9694b-wgmmg\" (UID: \"446fee83-175f-44a1-b4a5-24f8150e2d43\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg" Dec 02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.727660 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvzjx\" (UniqueName: \"kubernetes.io/projected/446fee83-175f-44a1-b4a5-24f8150e2d43-kube-api-access-zvzjx\") pod \"rabbitmq-cluster-operator-779fc9694b-wgmmg\" (UID: \"446fee83-175f-44a1-b4a5-24f8150e2d43\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg" Dec 02 12:07:26 crc kubenswrapper[4721]: I1202 12:07:26.775421 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg" Dec 02 12:07:27 crc kubenswrapper[4721]: I1202 12:07:27.097629 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg"] Dec 02 12:07:27 crc kubenswrapper[4721]: I1202 12:07:27.191212 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg" event={"ID":"446fee83-175f-44a1-b4a5-24f8150e2d43","Type":"ContainerStarted","Data":"39a1488744b97037e55b4edf9fbbecdfb11e095d93151f152689657f0f18dc26"} Dec 02 12:07:31 crc kubenswrapper[4721]: I1202 12:07:31.221924 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg" event={"ID":"446fee83-175f-44a1-b4a5-24f8150e2d43","Type":"ContainerStarted","Data":"2da6a71fca99c11864a01378be7ed54675690966ed484cd16e07dfb1655611ad"} Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.389283 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg" podStartSLOduration=5.41992361 podStartE2EDuration="8.389263997s" podCreationTimestamp="2025-12-02 12:07:26 +0000 UTC" firstStartedPulling="2025-12-02 12:07:27.113152583 +0000 UTC m=+976.851702932" lastFinishedPulling="2025-12-02 12:07:30.08249297 +0000 UTC m=+979.821043319" observedRunningTime="2025-12-02 12:07:31.240015838 +0000 UTC m=+980.978566227" watchObservedRunningTime="2025-12-02 12:07:34.389263997 +0000 UTC m=+984.127814346" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.395747 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"] Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.396723 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.398555 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"rabbitmq-server-conf" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.398843 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"rabbitmq-plugins-conf" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.399415 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"rabbitmq-erlang-cookie" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.399446 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"rabbitmq-default-user" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.399687 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"rabbitmq-server-dockercfg-zclwz" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.406517 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"] Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.516785 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/89920af2-3af7-4428-80fa-ddf2ba97dfb9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.517066 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.517181 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/89920af2-3af7-4428-80fa-ddf2ba97dfb9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.517268 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.517359 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/89920af2-3af7-4428-80fa-ddf2ba97dfb9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.517498 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bp2dh\" (UniqueName: \"kubernetes.io/projected/89920af2-3af7-4428-80fa-ddf2ba97dfb9-kube-api-access-bp2dh\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc 
kubenswrapper[4721]: I1202 12:07:34.517592 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.517678 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.618544 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.618819 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/89920af2-3af7-4428-80fa-ddf2ba97dfb9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.618901 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.618990 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/89920af2-3af7-4428-80fa-ddf2ba97dfb9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.619097 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bp2dh\" (UniqueName: \"kubernetes.io/projected/89920af2-3af7-4428-80fa-ddf2ba97dfb9-kube-api-access-bp2dh\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.619174 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.619146 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.619249 4721 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.619395 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/89920af2-3af7-4428-80fa-ddf2ba97dfb9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.619599 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.620247 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/89920af2-3af7-4428-80fa-ddf2ba97dfb9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.621750 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.621808 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9d6cefd24f857212e04803186e75d09696e7933eeda3624d804e058d1e099530/globalmount\"" pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.625117 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/89920af2-3af7-4428-80fa-ddf2ba97dfb9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.625120 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/89920af2-3af7-4428-80fa-ddf2ba97dfb9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.630276 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.639940 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bp2dh\" (UniqueName: 
\"kubernetes.io/projected/89920af2-3af7-4428-80fa-ddf2ba97dfb9-kube-api-access-bp2dh\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.653954 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9\") pod \"rabbitmq-server-0\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:34 crc kubenswrapper[4721]: I1202 12:07:34.717146 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:07:35 crc kubenswrapper[4721]: I1202 12:07:35.186485 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"] Dec 02 12:07:35 crc kubenswrapper[4721]: I1202 12:07:35.251539 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"89920af2-3af7-4428-80fa-ddf2ba97dfb9","Type":"ContainerStarted","Data":"5177285996d31ce58bb9dca4ed33ba9f6b0f3769f466c38c9494b3d5b95e6a0e"} Dec 02 12:07:35 crc kubenswrapper[4721]: I1202 12:07:35.973852 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-4f9gs"] Dec 02 12:07:35 crc kubenswrapper[4721]: I1202 12:07:35.979399 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-4f9gs" Dec 02 12:07:35 crc kubenswrapper[4721]: I1202 12:07:35.982258 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-index-dockercfg-fszft" Dec 02 12:07:35 crc kubenswrapper[4721]: I1202 12:07:35.985090 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-4f9gs"] Dec 02 12:07:36 crc kubenswrapper[4721]: I1202 12:07:36.098074 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pd8s\" (UniqueName: \"kubernetes.io/projected/b7499522-49b1-4362-a257-50c2d2bf3bdb-kube-api-access-4pd8s\") pod \"keystone-operator-index-4f9gs\" (UID: \"b7499522-49b1-4362-a257-50c2d2bf3bdb\") " pod="openstack-operators/keystone-operator-index-4f9gs" Dec 02 12:07:36 crc kubenswrapper[4721]: I1202 12:07:36.199724 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pd8s\" (UniqueName: \"kubernetes.io/projected/b7499522-49b1-4362-a257-50c2d2bf3bdb-kube-api-access-4pd8s\") pod \"keystone-operator-index-4f9gs\" (UID: \"b7499522-49b1-4362-a257-50c2d2bf3bdb\") " pod="openstack-operators/keystone-operator-index-4f9gs" Dec 02 12:07:36 crc kubenswrapper[4721]: I1202 12:07:36.224082 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pd8s\" (UniqueName: \"kubernetes.io/projected/b7499522-49b1-4362-a257-50c2d2bf3bdb-kube-api-access-4pd8s\") pod \"keystone-operator-index-4f9gs\" (UID: \"b7499522-49b1-4362-a257-50c2d2bf3bdb\") " pod="openstack-operators/keystone-operator-index-4f9gs" Dec 02 12:07:36 crc kubenswrapper[4721]: I1202 12:07:36.315646 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-4f9gs" Dec 02 12:07:36 crc kubenswrapper[4721]: I1202 12:07:36.743277 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-4f9gs"] Dec 02 12:07:38 crc kubenswrapper[4721]: W1202 12:07:38.337557 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb7499522_49b1_4362_a257_50c2d2bf3bdb.slice/crio-222b8bd358c288191b4c22f099e95fb6f6178638715433f93ee452263753b16d WatchSource:0}: Error finding container 222b8bd358c288191b4c22f099e95fb6f6178638715433f93ee452263753b16d: Status 404 returned error can't find the container with id 222b8bd358c288191b4c22f099e95fb6f6178638715433f93ee452263753b16d Dec 02 12:07:39 crc kubenswrapper[4721]: I1202 12:07:39.279671 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-4f9gs" event={"ID":"b7499522-49b1-4362-a257-50c2d2bf3bdb","Type":"ContainerStarted","Data":"222b8bd358c288191b4c22f099e95fb6f6178638715433f93ee452263753b16d"} Dec 02 12:07:41 crc kubenswrapper[4721]: I1202 12:07:41.294644 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"89920af2-3af7-4428-80fa-ddf2ba97dfb9","Type":"ContainerStarted","Data":"c07b2c4f609e7871d95b88adeef4fe6df645e60283b4626904994275357d4003"} Dec 02 12:07:41 crc kubenswrapper[4721]: I1202 12:07:41.296892 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-4f9gs" event={"ID":"b7499522-49b1-4362-a257-50c2d2bf3bdb","Type":"ContainerStarted","Data":"90b864ac038bfb49dd40394b0f414f074eae493a77e3c2bcb1c4c7072cd1e089"} Dec 02 12:07:41 crc kubenswrapper[4721]: I1202 12:07:41.330702 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-4f9gs" podStartSLOduration=4.240566451 podStartE2EDuration="6.33068709s" podCreationTimestamp="2025-12-02 12:07:35 +0000 UTC" firstStartedPulling="2025-12-02 12:07:38.338580802 +0000 UTC m=+988.077131151" lastFinishedPulling="2025-12-02 12:07:40.428701441 +0000 UTC m=+990.167251790" observedRunningTime="2025-12-02 12:07:41.330540515 +0000 UTC m=+991.069090884" watchObservedRunningTime="2025-12-02 12:07:41.33068709 +0000 UTC m=+991.069237439" Dec 02 12:07:46 crc kubenswrapper[4721]: I1202 12:07:46.316556 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-index-4f9gs" Dec 02 12:07:46 crc kubenswrapper[4721]: I1202 12:07:46.317268 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/keystone-operator-index-4f9gs" Dec 02 12:07:46 crc kubenswrapper[4721]: I1202 12:07:46.380087 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/keystone-operator-index-4f9gs" Dec 02 12:07:46 crc kubenswrapper[4721]: I1202 12:07:46.412619 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-index-4f9gs" Dec 02 12:07:48 crc kubenswrapper[4721]: I1202 12:07:48.206007 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d"] Dec 02 12:07:48 crc kubenswrapper[4721]: I1202 12:07:48.207802 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" Dec 02 12:07:48 crc kubenswrapper[4721]: I1202 12:07:48.209668 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-6fpcc" Dec 02 12:07:48 crc kubenswrapper[4721]: I1202 12:07:48.216852 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d"] Dec 02 12:07:48 crc kubenswrapper[4721]: I1202 12:07:48.276471 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f708318e-7bf1-4974-90cb-1b800029af79-util\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d\" (UID: \"f708318e-7bf1-4974-90cb-1b800029af79\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" Dec 02 12:07:48 crc kubenswrapper[4721]: I1202 12:07:48.276581 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcrr8\" (UniqueName: \"kubernetes.io/projected/f708318e-7bf1-4974-90cb-1b800029af79-kube-api-access-bcrr8\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d\" (UID: \"f708318e-7bf1-4974-90cb-1b800029af79\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" Dec 02 12:07:48 crc kubenswrapper[4721]: I1202 12:07:48.276951 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f708318e-7bf1-4974-90cb-1b800029af79-bundle\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d\" (UID: \"f708318e-7bf1-4974-90cb-1b800029af79\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" Dec 02 12:07:48 crc kubenswrapper[4721]: I1202 12:07:48.378468 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f708318e-7bf1-4974-90cb-1b800029af79-bundle\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d\" (UID: \"f708318e-7bf1-4974-90cb-1b800029af79\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" Dec 02 12:07:48 crc kubenswrapper[4721]: I1202 12:07:48.378531 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f708318e-7bf1-4974-90cb-1b800029af79-util\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d\" (UID: \"f708318e-7bf1-4974-90cb-1b800029af79\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" Dec 02 12:07:48 crc kubenswrapper[4721]: I1202 12:07:48.378577 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcrr8\" (UniqueName: \"kubernetes.io/projected/f708318e-7bf1-4974-90cb-1b800029af79-kube-api-access-bcrr8\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d\" (UID: \"f708318e-7bf1-4974-90cb-1b800029af79\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" Dec 02 12:07:48 crc kubenswrapper[4721]: I1202 12:07:48.379049 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/f708318e-7bf1-4974-90cb-1b800029af79-bundle\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d\" (UID: \"f708318e-7bf1-4974-90cb-1b800029af79\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" Dec 02 12:07:48 crc kubenswrapper[4721]: I1202 12:07:48.379112 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f708318e-7bf1-4974-90cb-1b800029af79-util\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d\" (UID: \"f708318e-7bf1-4974-90cb-1b800029af79\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" Dec 02 12:07:48 crc kubenswrapper[4721]: I1202 12:07:48.395930 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcrr8\" (UniqueName: \"kubernetes.io/projected/f708318e-7bf1-4974-90cb-1b800029af79-kube-api-access-bcrr8\") pod \"49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d\" (UID: \"f708318e-7bf1-4974-90cb-1b800029af79\") " pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" Dec 02 12:07:48 crc kubenswrapper[4721]: I1202 12:07:48.524120 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" Dec 02 12:07:48 crc kubenswrapper[4721]: I1202 12:07:48.959184 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d"] Dec 02 12:07:48 crc kubenswrapper[4721]: W1202 12:07:48.965898 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf708318e_7bf1_4974_90cb_1b800029af79.slice/crio-60fc30d42cefb76b052fb42422e99329a50c049c0a496608697aced2176e2c09 WatchSource:0}: Error finding container 60fc30d42cefb76b052fb42422e99329a50c049c0a496608697aced2176e2c09: Status 404 returned error can't find the container with id 60fc30d42cefb76b052fb42422e99329a50c049c0a496608697aced2176e2c09 Dec 02 12:07:49 crc kubenswrapper[4721]: I1202 12:07:49.348254 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" event={"ID":"f708318e-7bf1-4974-90cb-1b800029af79","Type":"ContainerStarted","Data":"60fc30d42cefb76b052fb42422e99329a50c049c0a496608697aced2176e2c09"} Dec 02 12:07:50 crc kubenswrapper[4721]: I1202 12:07:50.359235 4721 generic.go:334] "Generic (PLEG): container finished" podID="f708318e-7bf1-4974-90cb-1b800029af79" containerID="3d2811372dcaa70a66210d9a4e9105876d21bcdb977c4e8b73cc0002a8ccde32" exitCode=0 Dec 02 12:07:50 crc kubenswrapper[4721]: I1202 12:07:50.359500 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" event={"ID":"f708318e-7bf1-4974-90cb-1b800029af79","Type":"ContainerDied","Data":"3d2811372dcaa70a66210d9a4e9105876d21bcdb977c4e8b73cc0002a8ccde32"} Dec 02 12:07:52 crc kubenswrapper[4721]: I1202 12:07:52.376719 4721 generic.go:334] "Generic (PLEG): container finished" podID="f708318e-7bf1-4974-90cb-1b800029af79" containerID="3657190670b930bd797e45c74d816925ec80717d14b18e7006b8f6b6e3f7deb9" exitCode=0 Dec 02 12:07:52 crc kubenswrapper[4721]: I1202 12:07:52.376814 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" event={"ID":"f708318e-7bf1-4974-90cb-1b800029af79","Type":"ContainerDied","Data":"3657190670b930bd797e45c74d816925ec80717d14b18e7006b8f6b6e3f7deb9"} Dec 02 12:07:53 crc kubenswrapper[4721]: I1202 12:07:53.386678 4721 generic.go:334] "Generic (PLEG): container finished" podID="f708318e-7bf1-4974-90cb-1b800029af79" containerID="7beea8932fafcaff62d322d433c687da18e8d2f63e82a7e4f62d21d07c22445a" exitCode=0 Dec 02 12:07:53 crc kubenswrapper[4721]: I1202 12:07:53.387026 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" event={"ID":"f708318e-7bf1-4974-90cb-1b800029af79","Type":"ContainerDied","Data":"7beea8932fafcaff62d322d433c687da18e8d2f63e82a7e4f62d21d07c22445a"} Dec 02 12:07:54 crc kubenswrapper[4721]: I1202 12:07:54.628053 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" Dec 02 12:07:54 crc kubenswrapper[4721]: I1202 12:07:54.660844 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f708318e-7bf1-4974-90cb-1b800029af79-bundle\") pod \"f708318e-7bf1-4974-90cb-1b800029af79\" (UID: \"f708318e-7bf1-4974-90cb-1b800029af79\") " Dec 02 12:07:54 crc kubenswrapper[4721]: I1202 12:07:54.660897 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f708318e-7bf1-4974-90cb-1b800029af79-util\") pod \"f708318e-7bf1-4974-90cb-1b800029af79\" (UID: \"f708318e-7bf1-4974-90cb-1b800029af79\") " Dec 02 12:07:54 crc kubenswrapper[4721]: I1202 12:07:54.660924 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcrr8\" (UniqueName: \"kubernetes.io/projected/f708318e-7bf1-4974-90cb-1b800029af79-kube-api-access-bcrr8\") pod \"f708318e-7bf1-4974-90cb-1b800029af79\" (UID: \"f708318e-7bf1-4974-90cb-1b800029af79\") " Dec 02 12:07:54 crc kubenswrapper[4721]: I1202 12:07:54.662592 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f708318e-7bf1-4974-90cb-1b800029af79-bundle" (OuterVolumeSpecName: "bundle") pod "f708318e-7bf1-4974-90cb-1b800029af79" (UID: "f708318e-7bf1-4974-90cb-1b800029af79"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:07:54 crc kubenswrapper[4721]: I1202 12:07:54.667962 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f708318e-7bf1-4974-90cb-1b800029af79-kube-api-access-bcrr8" (OuterVolumeSpecName: "kube-api-access-bcrr8") pod "f708318e-7bf1-4974-90cb-1b800029af79" (UID: "f708318e-7bf1-4974-90cb-1b800029af79"). InnerVolumeSpecName "kube-api-access-bcrr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:07:54 crc kubenswrapper[4721]: I1202 12:07:54.677104 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f708318e-7bf1-4974-90cb-1b800029af79-util" (OuterVolumeSpecName: "util") pod "f708318e-7bf1-4974-90cb-1b800029af79" (UID: "f708318e-7bf1-4974-90cb-1b800029af79"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:07:54 crc kubenswrapper[4721]: I1202 12:07:54.763499 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f708318e-7bf1-4974-90cb-1b800029af79-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 12:07:54 crc kubenswrapper[4721]: I1202 12:07:54.763925 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f708318e-7bf1-4974-90cb-1b800029af79-util\") on node \"crc\" DevicePath \"\"" Dec 02 12:07:54 crc kubenswrapper[4721]: I1202 12:07:54.764007 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcrr8\" (UniqueName: \"kubernetes.io/projected/f708318e-7bf1-4974-90cb-1b800029af79-kube-api-access-bcrr8\") on node \"crc\" DevicePath \"\"" Dec 02 12:07:55 crc kubenswrapper[4721]: I1202 12:07:55.401078 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" event={"ID":"f708318e-7bf1-4974-90cb-1b800029af79","Type":"ContainerDied","Data":"60fc30d42cefb76b052fb42422e99329a50c049c0a496608697aced2176e2c09"} Dec 02 12:07:55 crc kubenswrapper[4721]: I1202 12:07:55.401124 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60fc30d42cefb76b052fb42422e99329a50c049c0a496608697aced2176e2c09" Dec 02 12:07:55 crc kubenswrapper[4721]: I1202 12:07:55.401180 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d" Dec 02 12:07:55 crc kubenswrapper[4721]: I1202 12:07:55.703706 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 12:07:55 crc kubenswrapper[4721]: I1202 12:07:55.704114 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.431590 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp"] Dec 02 12:08:09 crc kubenswrapper[4721]: E1202 12:08:09.432298 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f708318e-7bf1-4974-90cb-1b800029af79" containerName="util" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.432309 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f708318e-7bf1-4974-90cb-1b800029af79" containerName="util" Dec 02 12:08:09 crc kubenswrapper[4721]: E1202 12:08:09.432326 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f708318e-7bf1-4974-90cb-1b800029af79" containerName="pull" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.432333 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f708318e-7bf1-4974-90cb-1b800029af79" containerName="pull" Dec 02 12:08:09 crc kubenswrapper[4721]: E1202 12:08:09.432363 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f708318e-7bf1-4974-90cb-1b800029af79" containerName="extract" Dec 02 12:08:09 
crc kubenswrapper[4721]: I1202 12:08:09.432370 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f708318e-7bf1-4974-90cb-1b800029af79" containerName="extract" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.432476 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f708318e-7bf1-4974-90cb-1b800029af79" containerName="extract" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.432883 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.435153 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-service-cert" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.439428 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-c7w86" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.457187 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp"] Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.457907 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b79c462f-8fbc-41aa-9f98-c00c17ff9705-apiservice-cert\") pod \"keystone-operator-controller-manager-77bbd48959-gdffp\" (UID: \"b79c462f-8fbc-41aa-9f98-c00c17ff9705\") " pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.457958 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b79c462f-8fbc-41aa-9f98-c00c17ff9705-webhook-cert\") pod \"keystone-operator-controller-manager-77bbd48959-gdffp\" (UID: \"b79c462f-8fbc-41aa-9f98-c00c17ff9705\") " pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.457979 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vvqk\" (UniqueName: \"kubernetes.io/projected/b79c462f-8fbc-41aa-9f98-c00c17ff9705-kube-api-access-7vvqk\") pod \"keystone-operator-controller-manager-77bbd48959-gdffp\" (UID: \"b79c462f-8fbc-41aa-9f98-c00c17ff9705\") " pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.559742 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b79c462f-8fbc-41aa-9f98-c00c17ff9705-webhook-cert\") pod \"keystone-operator-controller-manager-77bbd48959-gdffp\" (UID: \"b79c462f-8fbc-41aa-9f98-c00c17ff9705\") " pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.559787 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vvqk\" (UniqueName: \"kubernetes.io/projected/b79c462f-8fbc-41aa-9f98-c00c17ff9705-kube-api-access-7vvqk\") pod \"keystone-operator-controller-manager-77bbd48959-gdffp\" (UID: \"b79c462f-8fbc-41aa-9f98-c00c17ff9705\") " pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 
12:08:09.559868 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b79c462f-8fbc-41aa-9f98-c00c17ff9705-apiservice-cert\") pod \"keystone-operator-controller-manager-77bbd48959-gdffp\" (UID: \"b79c462f-8fbc-41aa-9f98-c00c17ff9705\") " pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.565542 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b79c462f-8fbc-41aa-9f98-c00c17ff9705-apiservice-cert\") pod \"keystone-operator-controller-manager-77bbd48959-gdffp\" (UID: \"b79c462f-8fbc-41aa-9f98-c00c17ff9705\") " pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.565566 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b79c462f-8fbc-41aa-9f98-c00c17ff9705-webhook-cert\") pod \"keystone-operator-controller-manager-77bbd48959-gdffp\" (UID: \"b79c462f-8fbc-41aa-9f98-c00c17ff9705\") " pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.578015 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vvqk\" (UniqueName: \"kubernetes.io/projected/b79c462f-8fbc-41aa-9f98-c00c17ff9705-kube-api-access-7vvqk\") pod \"keystone-operator-controller-manager-77bbd48959-gdffp\" (UID: \"b79c462f-8fbc-41aa-9f98-c00c17ff9705\") " pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" Dec 02 12:08:09 crc kubenswrapper[4721]: I1202 12:08:09.748956 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" Dec 02 12:08:10 crc kubenswrapper[4721]: I1202 12:08:10.170094 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp"] Dec 02 12:08:10 crc kubenswrapper[4721]: I1202 12:08:10.490984 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" event={"ID":"b79c462f-8fbc-41aa-9f98-c00c17ff9705","Type":"ContainerStarted","Data":"2c0c77ef3c312ab246dedc520d3148e106fe65d870a44dd516f6f76ed01e6bfb"} Dec 02 12:08:13 crc kubenswrapper[4721]: I1202 12:08:13.515907 4721 generic.go:334] "Generic (PLEG): container finished" podID="89920af2-3af7-4428-80fa-ddf2ba97dfb9" containerID="c07b2c4f609e7871d95b88adeef4fe6df645e60283b4626904994275357d4003" exitCode=0 Dec 02 12:08:13 crc kubenswrapper[4721]: I1202 12:08:13.515987 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"89920af2-3af7-4428-80fa-ddf2ba97dfb9","Type":"ContainerDied","Data":"c07b2c4f609e7871d95b88adeef4fe6df645e60283b4626904994275357d4003"} Dec 02 12:08:13 crc kubenswrapper[4721]: I1202 12:08:13.518759 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" event={"ID":"b79c462f-8fbc-41aa-9f98-c00c17ff9705","Type":"ContainerStarted","Data":"8d2a18f36234e4505df388a3d89d4d8b09ccfbcca43b497db7586e886a614f2b"} Dec 02 12:08:13 crc kubenswrapper[4721]: I1202 12:08:13.519072 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" Dec 02 12:08:13 crc kubenswrapper[4721]: I1202 12:08:13.557263 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" podStartSLOduration=1.735086501 podStartE2EDuration="4.557249612s" podCreationTimestamp="2025-12-02 12:08:09 +0000 UTC" firstStartedPulling="2025-12-02 12:08:10.184362445 +0000 UTC m=+1019.922912804" lastFinishedPulling="2025-12-02 12:08:13.006525566 +0000 UTC m=+1022.745075915" observedRunningTime="2025-12-02 12:08:13.554337232 +0000 UTC m=+1023.292887581" watchObservedRunningTime="2025-12-02 12:08:13.557249612 +0000 UTC m=+1023.295799961" Dec 02 12:08:14 crc kubenswrapper[4721]: I1202 12:08:14.527463 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"89920af2-3af7-4428-80fa-ddf2ba97dfb9","Type":"ContainerStarted","Data":"3cc5505dcaf4c8eed98a80d0bcc290be4d06c7a19329d3cb28c29862fb1f1188"} Dec 02 12:08:14 crc kubenswrapper[4721]: I1202 12:08:14.528657 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:08:14 crc kubenswrapper[4721]: I1202 12:08:14.560497 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/rabbitmq-server-0" podStartSLOduration=36.744704334 podStartE2EDuration="41.560480215s" podCreationTimestamp="2025-12-02 12:07:33 +0000 UTC" firstStartedPulling="2025-12-02 12:07:35.194500601 +0000 UTC m=+984.933050970" lastFinishedPulling="2025-12-02 12:07:40.010276492 +0000 UTC m=+989.748826851" observedRunningTime="2025-12-02 12:08:14.559299808 +0000 UTC m=+1024.297850167" watchObservedRunningTime="2025-12-02 12:08:14.560480215 +0000 UTC m=+1024.299030564" Dec 
02 12:08:19 crc kubenswrapper[4721]: I1202 12:08:19.754678 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.585669 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-1088-account-create-update-qxbc8"] Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.587201 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-1088-account-create-update-qxbc8" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.589501 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-db-secret" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.611139 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-1088-account-create-update-qxbc8"] Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.685405 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-db-create-q9phr"] Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.686336 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-db-create-q9phr" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.703224 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-db-create-q9phr"] Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.722526 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.773448 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxl99\" (UniqueName: \"kubernetes.io/projected/783d4f63-0dcf-433e-8485-ccc511a31589-kube-api-access-vxl99\") pod \"keystone-1088-account-create-update-qxbc8\" (UID: \"783d4f63-0dcf-433e-8485-ccc511a31589\") " pod="swift-kuttl-tests/keystone-1088-account-create-update-qxbc8" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.773697 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/783d4f63-0dcf-433e-8485-ccc511a31589-operator-scripts\") pod \"keystone-1088-account-create-update-qxbc8\" (UID: \"783d4f63-0dcf-433e-8485-ccc511a31589\") " pod="swift-kuttl-tests/keystone-1088-account-create-update-qxbc8" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.875329 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv9ls\" (UniqueName: \"kubernetes.io/projected/7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620-kube-api-access-kv9ls\") pod \"keystone-db-create-q9phr\" (UID: \"7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620\") " pod="swift-kuttl-tests/keystone-db-create-q9phr" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.875886 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/783d4f63-0dcf-433e-8485-ccc511a31589-operator-scripts\") pod \"keystone-1088-account-create-update-qxbc8\" (UID: \"783d4f63-0dcf-433e-8485-ccc511a31589\") " pod="swift-kuttl-tests/keystone-1088-account-create-update-qxbc8" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.876610 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/783d4f63-0dcf-433e-8485-ccc511a31589-operator-scripts\") pod \"keystone-1088-account-create-update-qxbc8\" (UID: \"783d4f63-0dcf-433e-8485-ccc511a31589\") " pod="swift-kuttl-tests/keystone-1088-account-create-update-qxbc8" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.876664 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620-operator-scripts\") pod \"keystone-db-create-q9phr\" (UID: \"7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620\") " pod="swift-kuttl-tests/keystone-db-create-q9phr" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.876977 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxl99\" (UniqueName: \"kubernetes.io/projected/783d4f63-0dcf-433e-8485-ccc511a31589-kube-api-access-vxl99\") pod \"keystone-1088-account-create-update-qxbc8\" (UID: \"783d4f63-0dcf-433e-8485-ccc511a31589\") " pod="swift-kuttl-tests/keystone-1088-account-create-update-qxbc8" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.901074 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxl99\" (UniqueName: \"kubernetes.io/projected/783d4f63-0dcf-433e-8485-ccc511a31589-kube-api-access-vxl99\") pod \"keystone-1088-account-create-update-qxbc8\" (UID: \"783d4f63-0dcf-433e-8485-ccc511a31589\") " pod="swift-kuttl-tests/keystone-1088-account-create-update-qxbc8" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.909185 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-1088-account-create-update-qxbc8" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.978946 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620-operator-scripts\") pod \"keystone-db-create-q9phr\" (UID: \"7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620\") " pod="swift-kuttl-tests/keystone-db-create-q9phr" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.979070 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv9ls\" (UniqueName: \"kubernetes.io/projected/7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620-kube-api-access-kv9ls\") pod \"keystone-db-create-q9phr\" (UID: \"7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620\") " pod="swift-kuttl-tests/keystone-db-create-q9phr" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.980038 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620-operator-scripts\") pod \"keystone-db-create-q9phr\" (UID: \"7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620\") " pod="swift-kuttl-tests/keystone-db-create-q9phr" Dec 02 12:08:24 crc kubenswrapper[4721]: I1202 12:08:24.998826 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kv9ls\" (UniqueName: \"kubernetes.io/projected/7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620-kube-api-access-kv9ls\") pod \"keystone-db-create-q9phr\" (UID: \"7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620\") " pod="swift-kuttl-tests/keystone-db-create-q9phr" Dec 02 12:08:25 crc kubenswrapper[4721]: I1202 12:08:25.002441 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-db-create-q9phr" Dec 02 12:08:25 crc kubenswrapper[4721]: I1202 12:08:25.190839 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-1088-account-create-update-qxbc8"] Dec 02 12:08:25 crc kubenswrapper[4721]: I1202 12:08:25.249511 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-db-create-q9phr"] Dec 02 12:08:25 crc kubenswrapper[4721]: W1202 12:08:25.263763 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7640cbd4_a5b8_4bb1_9fa8_d2d3eaa74620.slice/crio-c20e700c7e56a1d78a7bf65be474b18738b0f2f3179b313cbc5be26189b84b3f WatchSource:0}: Error finding container c20e700c7e56a1d78a7bf65be474b18738b0f2f3179b313cbc5be26189b84b3f: Status 404 returned error can't find the container with id c20e700c7e56a1d78a7bf65be474b18738b0f2f3179b313cbc5be26189b84b3f Dec 02 12:08:25 crc kubenswrapper[4721]: I1202 12:08:25.591463 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-create-q9phr" event={"ID":"7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620","Type":"ContainerStarted","Data":"c20e700c7e56a1d78a7bf65be474b18738b0f2f3179b313cbc5be26189b84b3f"} Dec 02 12:08:25 crc kubenswrapper[4721]: I1202 12:08:25.592734 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-1088-account-create-update-qxbc8" event={"ID":"783d4f63-0dcf-433e-8485-ccc511a31589","Type":"ContainerStarted","Data":"81c853a1a44abcfcfb6b2ea71b236fa7d04f13b6532b1232c03ee02be14da522"} Dec 02 12:08:25 crc kubenswrapper[4721]: I1202 12:08:25.703158 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 12:08:25 crc kubenswrapper[4721]: I1202 12:08:25.703224 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 12:08:25 crc kubenswrapper[4721]: I1202 12:08:25.703265 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 12:08:25 crc kubenswrapper[4721]: I1202 12:08:25.703723 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"454276a48c0c3533be2304833299320cbec4257417769197c8e6697daf1879ef"} pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 12:08:25 crc kubenswrapper[4721]: I1202 12:08:25.703777 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" containerID="cri-o://454276a48c0c3533be2304833299320cbec4257417769197c8e6697daf1879ef" gracePeriod=600 Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.168703 4721 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/barbican-operator-index-tmvr4"] Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.169972 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-index-tmvr4" Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.173320 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-index-dockercfg-qffzr" Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.176555 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-index-tmvr4"] Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.209021 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ckxw\" (UniqueName: \"kubernetes.io/projected/89b339ea-d21f-4d65-bb93-926e793838ef-kube-api-access-4ckxw\") pod \"barbican-operator-index-tmvr4\" (UID: \"89b339ea-d21f-4d65-bb93-926e793838ef\") " pod="openstack-operators/barbican-operator-index-tmvr4" Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.310885 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ckxw\" (UniqueName: \"kubernetes.io/projected/89b339ea-d21f-4d65-bb93-926e793838ef-kube-api-access-4ckxw\") pod \"barbican-operator-index-tmvr4\" (UID: \"89b339ea-d21f-4d65-bb93-926e793838ef\") " pod="openstack-operators/barbican-operator-index-tmvr4" Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.346435 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ckxw\" (UniqueName: \"kubernetes.io/projected/89b339ea-d21f-4d65-bb93-926e793838ef-kube-api-access-4ckxw\") pod \"barbican-operator-index-tmvr4\" (UID: \"89b339ea-d21f-4d65-bb93-926e793838ef\") " pod="openstack-operators/barbican-operator-index-tmvr4" Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.523844 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-index-tmvr4" Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.602431 4721 generic.go:334] "Generic (PLEG): container finished" podID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerID="454276a48c0c3533be2304833299320cbec4257417769197c8e6697daf1879ef" exitCode=0 Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.602484 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerDied","Data":"454276a48c0c3533be2304833299320cbec4257417769197c8e6697daf1879ef"} Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.602509 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerStarted","Data":"53f7071ac35634b832b9710cc1dcc9432696ff76188da7c6520c40b950115114"} Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.602526 4721 scope.go:117] "RemoveContainer" containerID="240b7cf6f3c5851e9a4d93bbfdd59e21dcf90d1e5f54d8148006085d773e5e7b" Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.606476 4721 generic.go:334] "Generic (PLEG): container finished" podID="7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620" containerID="3a09d2a686d153d1883de807db818b9a01ac2db58431e30879a72e21f4a5f38c" exitCode=0 Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.606534 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-create-q9phr" event={"ID":"7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620","Type":"ContainerDied","Data":"3a09d2a686d153d1883de807db818b9a01ac2db58431e30879a72e21f4a5f38c"} Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.611626 4721 generic.go:334] "Generic (PLEG): container finished" podID="783d4f63-0dcf-433e-8485-ccc511a31589" containerID="989a2a67669099b9a0dcfb4dbef74860804b11705d940e4a4b9b264c9d38d30e" exitCode=0 Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.611668 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-1088-account-create-update-qxbc8" event={"ID":"783d4f63-0dcf-433e-8485-ccc511a31589","Type":"ContainerDied","Data":"989a2a67669099b9a0dcfb4dbef74860804b11705d940e4a4b9b264c9d38d30e"} Dec 02 12:08:26 crc kubenswrapper[4721]: I1202 12:08:26.917036 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-index-tmvr4"] Dec 02 12:08:27 crc kubenswrapper[4721]: I1202 12:08:27.619967 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-tmvr4" event={"ID":"89b339ea-d21f-4d65-bb93-926e793838ef","Type":"ContainerStarted","Data":"f30c576a3682a82f8dbfaf4d4b6436aa2613cba8247ce8bd73cf53f37fc14c98"} Dec 02 12:08:27 crc kubenswrapper[4721]: I1202 12:08:27.989609 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-1088-account-create-update-qxbc8" Dec 02 12:08:27 crc kubenswrapper[4721]: I1202 12:08:27.995281 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-db-create-q9phr" Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.141691 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/783d4f63-0dcf-433e-8485-ccc511a31589-operator-scripts\") pod \"783d4f63-0dcf-433e-8485-ccc511a31589\" (UID: \"783d4f63-0dcf-433e-8485-ccc511a31589\") " Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.142160 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620-operator-scripts\") pod \"7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620\" (UID: \"7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620\") " Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.142211 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kv9ls\" (UniqueName: \"kubernetes.io/projected/7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620-kube-api-access-kv9ls\") pod \"7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620\" (UID: \"7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620\") " Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.142249 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxl99\" (UniqueName: \"kubernetes.io/projected/783d4f63-0dcf-433e-8485-ccc511a31589-kube-api-access-vxl99\") pod \"783d4f63-0dcf-433e-8485-ccc511a31589\" (UID: \"783d4f63-0dcf-433e-8485-ccc511a31589\") " Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.143305 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620" (UID: "7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.143707 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/783d4f63-0dcf-433e-8485-ccc511a31589-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "783d4f63-0dcf-433e-8485-ccc511a31589" (UID: "783d4f63-0dcf-433e-8485-ccc511a31589"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.147474 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/783d4f63-0dcf-433e-8485-ccc511a31589-kube-api-access-vxl99" (OuterVolumeSpecName: "kube-api-access-vxl99") pod "783d4f63-0dcf-433e-8485-ccc511a31589" (UID: "783d4f63-0dcf-433e-8485-ccc511a31589"). InnerVolumeSpecName "kube-api-access-vxl99". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.154540 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620-kube-api-access-kv9ls" (OuterVolumeSpecName: "kube-api-access-kv9ls") pod "7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620" (UID: "7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620"). InnerVolumeSpecName "kube-api-access-kv9ls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.243805 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/783d4f63-0dcf-433e-8485-ccc511a31589-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.243839 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.243852 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kv9ls\" (UniqueName: \"kubernetes.io/projected/7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620-kube-api-access-kv9ls\") on node \"crc\" DevicePath \"\"" Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.243866 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxl99\" (UniqueName: \"kubernetes.io/projected/783d4f63-0dcf-433e-8485-ccc511a31589-kube-api-access-vxl99\") on node \"crc\" DevicePath \"\"" Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.630449 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-tmvr4" event={"ID":"89b339ea-d21f-4d65-bb93-926e793838ef","Type":"ContainerStarted","Data":"a1ec8b2f2103a8ef24ac54eb8c030ec2dcf7d68467f9ea7996722b3eeb6ab815"} Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.632578 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-create-q9phr" event={"ID":"7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620","Type":"ContainerDied","Data":"c20e700c7e56a1d78a7bf65be474b18738b0f2f3179b313cbc5be26189b84b3f"} Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.632607 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c20e700c7e56a1d78a7bf65be474b18738b0f2f3179b313cbc5be26189b84b3f" Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.633270 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-db-create-q9phr" Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.635821 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-1088-account-create-update-qxbc8" event={"ID":"783d4f63-0dcf-433e-8485-ccc511a31589","Type":"ContainerDied","Data":"81c853a1a44abcfcfb6b2ea71b236fa7d04f13b6532b1232c03ee02be14da522"} Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.635851 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81c853a1a44abcfcfb6b2ea71b236fa7d04f13b6532b1232c03ee02be14da522" Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.635893 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-1088-account-create-update-qxbc8" Dec 02 12:08:28 crc kubenswrapper[4721]: I1202 12:08:28.652206 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-index-tmvr4" podStartSLOduration=1.333661473 podStartE2EDuration="2.652183302s" podCreationTimestamp="2025-12-02 12:08:26 +0000 UTC" firstStartedPulling="2025-12-02 12:08:26.926336556 +0000 UTC m=+1036.664886905" lastFinishedPulling="2025-12-02 12:08:28.244858385 +0000 UTC m=+1037.983408734" observedRunningTime="2025-12-02 12:08:28.647758655 +0000 UTC m=+1038.386309004" watchObservedRunningTime="2025-12-02 12:08:28.652183302 +0000 UTC m=+1038.390733651" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.330642 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-db-sync-r2n9m"] Dec 02 12:08:30 crc kubenswrapper[4721]: E1202 12:08:30.331234 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620" containerName="mariadb-database-create" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.331252 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620" containerName="mariadb-database-create" Dec 02 12:08:30 crc kubenswrapper[4721]: E1202 12:08:30.331277 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="783d4f63-0dcf-433e-8485-ccc511a31589" containerName="mariadb-account-create-update" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.331286 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="783d4f63-0dcf-433e-8485-ccc511a31589" containerName="mariadb-account-create-update" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.331610 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620" containerName="mariadb-database-create" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.331633 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="783d4f63-0dcf-433e-8485-ccc511a31589" containerName="mariadb-account-create-update" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.332160 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-db-sync-r2n9m" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.334140 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-scripts" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.334675 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.334886 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-keystone-dockercfg-rpvw9" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.335050 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-config-data" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.338908 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-db-sync-r2n9m"] Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.481071 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45md2\" (UniqueName: \"kubernetes.io/projected/fc273c30-90d7-4b50-bf60-0e3292eed557-kube-api-access-45md2\") pod \"keystone-db-sync-r2n9m\" (UID: \"fc273c30-90d7-4b50-bf60-0e3292eed557\") " pod="swift-kuttl-tests/keystone-db-sync-r2n9m" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.481132 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc273c30-90d7-4b50-bf60-0e3292eed557-config-data\") pod \"keystone-db-sync-r2n9m\" (UID: \"fc273c30-90d7-4b50-bf60-0e3292eed557\") " pod="swift-kuttl-tests/keystone-db-sync-r2n9m" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.581934 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc273c30-90d7-4b50-bf60-0e3292eed557-config-data\") pod \"keystone-db-sync-r2n9m\" (UID: \"fc273c30-90d7-4b50-bf60-0e3292eed557\") " pod="swift-kuttl-tests/keystone-db-sync-r2n9m" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.582049 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45md2\" (UniqueName: \"kubernetes.io/projected/fc273c30-90d7-4b50-bf60-0e3292eed557-kube-api-access-45md2\") pod \"keystone-db-sync-r2n9m\" (UID: \"fc273c30-90d7-4b50-bf60-0e3292eed557\") " pod="swift-kuttl-tests/keystone-db-sync-r2n9m" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.588310 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc273c30-90d7-4b50-bf60-0e3292eed557-config-data\") pod \"keystone-db-sync-r2n9m\" (UID: \"fc273c30-90d7-4b50-bf60-0e3292eed557\") " pod="swift-kuttl-tests/keystone-db-sync-r2n9m" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.606143 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45md2\" (UniqueName: \"kubernetes.io/projected/fc273c30-90d7-4b50-bf60-0e3292eed557-kube-api-access-45md2\") pod \"keystone-db-sync-r2n9m\" (UID: \"fc273c30-90d7-4b50-bf60-0e3292eed557\") " pod="swift-kuttl-tests/keystone-db-sync-r2n9m" Dec 02 12:08:30 crc kubenswrapper[4721]: I1202 12:08:30.646809 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-db-sync-r2n9m" Dec 02 12:08:31 crc kubenswrapper[4721]: I1202 12:08:31.034275 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-db-sync-r2n9m"] Dec 02 12:08:31 crc kubenswrapper[4721]: W1202 12:08:31.037695 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc273c30_90d7_4b50_bf60_0e3292eed557.slice/crio-719a202d0cc74e9b83a7e689572e1a2d6685b5ea5294d4d137e249973b50ded8 WatchSource:0}: Error finding container 719a202d0cc74e9b83a7e689572e1a2d6685b5ea5294d4d137e249973b50ded8: Status 404 returned error can't find the container with id 719a202d0cc74e9b83a7e689572e1a2d6685b5ea5294d4d137e249973b50ded8 Dec 02 12:08:31 crc kubenswrapper[4721]: I1202 12:08:31.652195 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-sync-r2n9m" event={"ID":"fc273c30-90d7-4b50-bf60-0e3292eed557","Type":"ContainerStarted","Data":"719a202d0cc74e9b83a7e689572e1a2d6685b5ea5294d4d137e249973b50ded8"} Dec 02 12:08:36 crc kubenswrapper[4721]: I1202 12:08:36.569866 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/barbican-operator-index-tmvr4" Dec 02 12:08:36 crc kubenswrapper[4721]: I1202 12:08:36.570595 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-index-tmvr4" Dec 02 12:08:36 crc kubenswrapper[4721]: I1202 12:08:36.599137 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/barbican-operator-index-tmvr4" Dec 02 12:08:36 crc kubenswrapper[4721]: I1202 12:08:36.902947 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-index-tmvr4" Dec 02 12:08:40 crc kubenswrapper[4721]: I1202 12:08:40.908481 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-sync-r2n9m" event={"ID":"fc273c30-90d7-4b50-bf60-0e3292eed557","Type":"ContainerStarted","Data":"35c98f8a826109cd5f3068477fa1fcb608d809c15e6bf77199d49da95401ab26"} Dec 02 12:08:40 crc kubenswrapper[4721]: I1202 12:08:40.925819 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/keystone-db-sync-r2n9m" podStartSLOduration=1.331175808 podStartE2EDuration="10.925802055s" podCreationTimestamp="2025-12-02 12:08:30 +0000 UTC" firstStartedPulling="2025-12-02 12:08:31.041068867 +0000 UTC m=+1040.779619216" lastFinishedPulling="2025-12-02 12:08:40.635695114 +0000 UTC m=+1050.374245463" observedRunningTime="2025-12-02 12:08:40.922130573 +0000 UTC m=+1050.660680932" watchObservedRunningTime="2025-12-02 12:08:40.925802055 +0000 UTC m=+1050.664352404" Dec 02 12:08:43 crc kubenswrapper[4721]: I1202 12:08:43.796609 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf"] Dec 02 12:08:43 crc kubenswrapper[4721]: I1202 12:08:43.798175 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" Dec 02 12:08:43 crc kubenswrapper[4721]: I1202 12:08:43.804206 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-6fpcc" Dec 02 12:08:43 crc kubenswrapper[4721]: I1202 12:08:43.811301 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf"] Dec 02 12:08:43 crc kubenswrapper[4721]: I1202 12:08:43.873163 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/317859f7-dacf-4980-ac45-8d39758ac1cd-util\") pod \"ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf\" (UID: \"317859f7-dacf-4980-ac45-8d39758ac1cd\") " pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" Dec 02 12:08:43 crc kubenswrapper[4721]: I1202 12:08:43.873224 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/317859f7-dacf-4980-ac45-8d39758ac1cd-bundle\") pod \"ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf\" (UID: \"317859f7-dacf-4980-ac45-8d39758ac1cd\") " pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" Dec 02 12:08:43 crc kubenswrapper[4721]: I1202 12:08:43.873497 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qn5hr\" (UniqueName: \"kubernetes.io/projected/317859f7-dacf-4980-ac45-8d39758ac1cd-kube-api-access-qn5hr\") pod \"ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf\" (UID: \"317859f7-dacf-4980-ac45-8d39758ac1cd\") " pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" Dec 02 12:08:43 crc kubenswrapper[4721]: I1202 12:08:43.931397 4721 generic.go:334] "Generic (PLEG): container finished" podID="fc273c30-90d7-4b50-bf60-0e3292eed557" containerID="35c98f8a826109cd5f3068477fa1fcb608d809c15e6bf77199d49da95401ab26" exitCode=0 Dec 02 12:08:43 crc kubenswrapper[4721]: I1202 12:08:43.931445 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-sync-r2n9m" event={"ID":"fc273c30-90d7-4b50-bf60-0e3292eed557","Type":"ContainerDied","Data":"35c98f8a826109cd5f3068477fa1fcb608d809c15e6bf77199d49da95401ab26"} Dec 02 12:08:43 crc kubenswrapper[4721]: I1202 12:08:43.975332 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/317859f7-dacf-4980-ac45-8d39758ac1cd-util\") pod \"ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf\" (UID: \"317859f7-dacf-4980-ac45-8d39758ac1cd\") " pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" Dec 02 12:08:43 crc kubenswrapper[4721]: I1202 12:08:43.975429 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/317859f7-dacf-4980-ac45-8d39758ac1cd-bundle\") pod \"ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf\" (UID: \"317859f7-dacf-4980-ac45-8d39758ac1cd\") " pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" Dec 02 12:08:43 crc kubenswrapper[4721]: I1202 12:08:43.975502 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-qn5hr\" (UniqueName: \"kubernetes.io/projected/317859f7-dacf-4980-ac45-8d39758ac1cd-kube-api-access-qn5hr\") pod \"ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf\" (UID: \"317859f7-dacf-4980-ac45-8d39758ac1cd\") " pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" Dec 02 12:08:43 crc kubenswrapper[4721]: I1202 12:08:43.975714 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/317859f7-dacf-4980-ac45-8d39758ac1cd-util\") pod \"ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf\" (UID: \"317859f7-dacf-4980-ac45-8d39758ac1cd\") " pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" Dec 02 12:08:43 crc kubenswrapper[4721]: I1202 12:08:43.975946 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/317859f7-dacf-4980-ac45-8d39758ac1cd-bundle\") pod \"ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf\" (UID: \"317859f7-dacf-4980-ac45-8d39758ac1cd\") " pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" Dec 02 12:08:43 crc kubenswrapper[4721]: I1202 12:08:43.994599 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qn5hr\" (UniqueName: \"kubernetes.io/projected/317859f7-dacf-4980-ac45-8d39758ac1cd-kube-api-access-qn5hr\") pod \"ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf\" (UID: \"317859f7-dacf-4980-ac45-8d39758ac1cd\") " pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" Dec 02 12:08:44 crc kubenswrapper[4721]: I1202 12:08:44.219579 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" Dec 02 12:08:44 crc kubenswrapper[4721]: I1202 12:08:44.627257 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf"] Dec 02 12:08:44 crc kubenswrapper[4721]: W1202 12:08:44.630899 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod317859f7_dacf_4980_ac45_8d39758ac1cd.slice/crio-cd716e848de2e9e75ecc563d51f6f716619ee2d1cffcb0cfa8e329d6fa002cd0 WatchSource:0}: Error finding container cd716e848de2e9e75ecc563d51f6f716619ee2d1cffcb0cfa8e329d6fa002cd0: Status 404 returned error can't find the container with id cd716e848de2e9e75ecc563d51f6f716619ee2d1cffcb0cfa8e329d6fa002cd0 Dec 02 12:08:44 crc kubenswrapper[4721]: I1202 12:08:44.938583 4721 generic.go:334] "Generic (PLEG): container finished" podID="317859f7-dacf-4980-ac45-8d39758ac1cd" containerID="646d2606fee5f76263970ee9d43a4a87465a4433a234be1bb22de42d414b0577" exitCode=0 Dec 02 12:08:44 crc kubenswrapper[4721]: I1202 12:08:44.938687 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" event={"ID":"317859f7-dacf-4980-ac45-8d39758ac1cd","Type":"ContainerDied","Data":"646d2606fee5f76263970ee9d43a4a87465a4433a234be1bb22de42d414b0577"} Dec 02 12:08:44 crc kubenswrapper[4721]: I1202 12:08:44.938898 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" event={"ID":"317859f7-dacf-4980-ac45-8d39758ac1cd","Type":"ContainerStarted","Data":"cd716e848de2e9e75ecc563d51f6f716619ee2d1cffcb0cfa8e329d6fa002cd0"} Dec 02 12:08:44 crc kubenswrapper[4721]: I1202 12:08:44.939987 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 12:08:45 crc kubenswrapper[4721]: I1202 12:08:45.216885 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-db-sync-r2n9m" Dec 02 12:08:45 crc kubenswrapper[4721]: I1202 12:08:45.391269 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45md2\" (UniqueName: \"kubernetes.io/projected/fc273c30-90d7-4b50-bf60-0e3292eed557-kube-api-access-45md2\") pod \"fc273c30-90d7-4b50-bf60-0e3292eed557\" (UID: \"fc273c30-90d7-4b50-bf60-0e3292eed557\") " Dec 02 12:08:45 crc kubenswrapper[4721]: I1202 12:08:45.391357 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc273c30-90d7-4b50-bf60-0e3292eed557-config-data\") pod \"fc273c30-90d7-4b50-bf60-0e3292eed557\" (UID: \"fc273c30-90d7-4b50-bf60-0e3292eed557\") " Dec 02 12:08:45 crc kubenswrapper[4721]: I1202 12:08:45.396095 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc273c30-90d7-4b50-bf60-0e3292eed557-kube-api-access-45md2" (OuterVolumeSpecName: "kube-api-access-45md2") pod "fc273c30-90d7-4b50-bf60-0e3292eed557" (UID: "fc273c30-90d7-4b50-bf60-0e3292eed557"). InnerVolumeSpecName "kube-api-access-45md2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:08:45 crc kubenswrapper[4721]: I1202 12:08:45.425058 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc273c30-90d7-4b50-bf60-0e3292eed557-config-data" (OuterVolumeSpecName: "config-data") pod "fc273c30-90d7-4b50-bf60-0e3292eed557" (UID: "fc273c30-90d7-4b50-bf60-0e3292eed557"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:08:45 crc kubenswrapper[4721]: I1202 12:08:45.493048 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45md2\" (UniqueName: \"kubernetes.io/projected/fc273c30-90d7-4b50-bf60-0e3292eed557-kube-api-access-45md2\") on node \"crc\" DevicePath \"\"" Dec 02 12:08:45 crc kubenswrapper[4721]: I1202 12:08:45.493097 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc273c30-90d7-4b50-bf60-0e3292eed557-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 12:08:45 crc kubenswrapper[4721]: I1202 12:08:45.948189 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-db-sync-r2n9m" event={"ID":"fc273c30-90d7-4b50-bf60-0e3292eed557","Type":"ContainerDied","Data":"719a202d0cc74e9b83a7e689572e1a2d6685b5ea5294d4d137e249973b50ded8"} Dec 02 12:08:45 crc kubenswrapper[4721]: I1202 12:08:45.948223 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="719a202d0cc74e9b83a7e689572e1a2d6685b5ea5294d4d137e249973b50ded8" Dec 02 12:08:45 crc kubenswrapper[4721]: I1202 12:08:45.949824 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-db-sync-r2n9m" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.138003 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-bootstrap-sbkb7"] Dec 02 12:08:46 crc kubenswrapper[4721]: E1202 12:08:46.138379 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc273c30-90d7-4b50-bf60-0e3292eed557" containerName="keystone-db-sync" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.138402 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc273c30-90d7-4b50-bf60-0e3292eed557" containerName="keystone-db-sync" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.138587 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc273c30-90d7-4b50-bf60-0e3292eed557" containerName="keystone-db-sync" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.139178 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.140976 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"osp-secret" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.141710 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-scripts" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.148295 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-bootstrap-sbkb7"] Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.155913 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.156104 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-config-data" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.156218 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-keystone-dockercfg-rpvw9" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.333490 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2lwq\" (UniqueName: \"kubernetes.io/projected/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-kube-api-access-v2lwq\") pod \"keystone-bootstrap-sbkb7\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.333817 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-credential-keys\") pod \"keystone-bootstrap-sbkb7\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.333851 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-config-data\") pod \"keystone-bootstrap-sbkb7\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.333881 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-fernet-keys\") pod \"keystone-bootstrap-sbkb7\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.333906 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-scripts\") pod \"keystone-bootstrap-sbkb7\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.434966 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-credential-keys\") pod \"keystone-bootstrap-sbkb7\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 
12:08:46.435032 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-config-data\") pod \"keystone-bootstrap-sbkb7\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.435051 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-fernet-keys\") pod \"keystone-bootstrap-sbkb7\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.435079 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-scripts\") pod \"keystone-bootstrap-sbkb7\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.435109 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2lwq\" (UniqueName: \"kubernetes.io/projected/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-kube-api-access-v2lwq\") pod \"keystone-bootstrap-sbkb7\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.440917 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-credential-keys\") pod \"keystone-bootstrap-sbkb7\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.442780 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-scripts\") pod \"keystone-bootstrap-sbkb7\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.443137 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-fernet-keys\") pod \"keystone-bootstrap-sbkb7\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.443208 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-config-data\") pod \"keystone-bootstrap-sbkb7\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.450373 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2lwq\" (UniqueName: \"kubernetes.io/projected/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-kube-api-access-v2lwq\") pod \"keystone-bootstrap-sbkb7\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.508014 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.919193 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-bootstrap-sbkb7"] Dec 02 12:08:46 crc kubenswrapper[4721]: W1202 12:08:46.920599 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcbe54636_ef0a_4d0d_bcbd_1990e8433d04.slice/crio-80d0445cfb40346da6d4a2668428076d066df22795ef74ab5f27cdc16a99c5be WatchSource:0}: Error finding container 80d0445cfb40346da6d4a2668428076d066df22795ef74ab5f27cdc16a99c5be: Status 404 returned error can't find the container with id 80d0445cfb40346da6d4a2668428076d066df22795ef74ab5f27cdc16a99c5be Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.955545 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" event={"ID":"cbe54636-ef0a-4d0d-bcbd-1990e8433d04","Type":"ContainerStarted","Data":"80d0445cfb40346da6d4a2668428076d066df22795ef74ab5f27cdc16a99c5be"} Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.957485 4721 generic.go:334] "Generic (PLEG): container finished" podID="317859f7-dacf-4980-ac45-8d39758ac1cd" containerID="7a094837d14f6e8e9e3f9401ad182265db1d1a03ca5e4fbfbadd9310b2a3515b" exitCode=0 Dec 02 12:08:46 crc kubenswrapper[4721]: I1202 12:08:46.957520 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" event={"ID":"317859f7-dacf-4980-ac45-8d39758ac1cd","Type":"ContainerDied","Data":"7a094837d14f6e8e9e3f9401ad182265db1d1a03ca5e4fbfbadd9310b2a3515b"} Dec 02 12:08:47 crc kubenswrapper[4721]: I1202 12:08:47.977060 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" event={"ID":"cbe54636-ef0a-4d0d-bcbd-1990e8433d04","Type":"ContainerStarted","Data":"015794c7a59acb77419f4cb9303b7c8104a1cffc470528cbb0728af7ce93d1bb"} Dec 02 12:08:47 crc kubenswrapper[4721]: I1202 12:08:47.980804 4721 generic.go:334] "Generic (PLEG): container finished" podID="317859f7-dacf-4980-ac45-8d39758ac1cd" containerID="fa529b8d14ca353f826dbcb7b3dcb20221b0e0b1d7224e9cf7e7ab88fc907250" exitCode=0 Dec 02 12:08:47 crc kubenswrapper[4721]: I1202 12:08:47.980840 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" event={"ID":"317859f7-dacf-4980-ac45-8d39758ac1cd","Type":"ContainerDied","Data":"fa529b8d14ca353f826dbcb7b3dcb20221b0e0b1d7224e9cf7e7ab88fc907250"} Dec 02 12:08:47 crc kubenswrapper[4721]: I1202 12:08:47.995945 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" podStartSLOduration=1.995905705 podStartE2EDuration="1.995905705s" podCreationTimestamp="2025-12-02 12:08:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:08:47.99346357 +0000 UTC m=+1057.732013939" watchObservedRunningTime="2025-12-02 12:08:47.995905705 +0000 UTC m=+1057.734456094" Dec 02 12:08:49 crc kubenswrapper[4721]: I1202 12:08:49.245610 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" Dec 02 12:08:49 crc kubenswrapper[4721]: I1202 12:08:49.276559 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/317859f7-dacf-4980-ac45-8d39758ac1cd-util\") pod \"317859f7-dacf-4980-ac45-8d39758ac1cd\" (UID: \"317859f7-dacf-4980-ac45-8d39758ac1cd\") " Dec 02 12:08:49 crc kubenswrapper[4721]: I1202 12:08:49.276646 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/317859f7-dacf-4980-ac45-8d39758ac1cd-bundle\") pod \"317859f7-dacf-4980-ac45-8d39758ac1cd\" (UID: \"317859f7-dacf-4980-ac45-8d39758ac1cd\") " Dec 02 12:08:49 crc kubenswrapper[4721]: I1202 12:08:49.276705 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qn5hr\" (UniqueName: \"kubernetes.io/projected/317859f7-dacf-4980-ac45-8d39758ac1cd-kube-api-access-qn5hr\") pod \"317859f7-dacf-4980-ac45-8d39758ac1cd\" (UID: \"317859f7-dacf-4980-ac45-8d39758ac1cd\") " Dec 02 12:08:49 crc kubenswrapper[4721]: I1202 12:08:49.277755 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/317859f7-dacf-4980-ac45-8d39758ac1cd-bundle" (OuterVolumeSpecName: "bundle") pod "317859f7-dacf-4980-ac45-8d39758ac1cd" (UID: "317859f7-dacf-4980-ac45-8d39758ac1cd"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:08:49 crc kubenswrapper[4721]: I1202 12:08:49.282052 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/317859f7-dacf-4980-ac45-8d39758ac1cd-kube-api-access-qn5hr" (OuterVolumeSpecName: "kube-api-access-qn5hr") pod "317859f7-dacf-4980-ac45-8d39758ac1cd" (UID: "317859f7-dacf-4980-ac45-8d39758ac1cd"). InnerVolumeSpecName "kube-api-access-qn5hr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:08:49 crc kubenswrapper[4721]: I1202 12:08:49.299937 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/317859f7-dacf-4980-ac45-8d39758ac1cd-util" (OuterVolumeSpecName: "util") pod "317859f7-dacf-4980-ac45-8d39758ac1cd" (UID: "317859f7-dacf-4980-ac45-8d39758ac1cd"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:08:49 crc kubenswrapper[4721]: I1202 12:08:49.378505 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/317859f7-dacf-4980-ac45-8d39758ac1cd-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 12:08:49 crc kubenswrapper[4721]: I1202 12:08:49.378551 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qn5hr\" (UniqueName: \"kubernetes.io/projected/317859f7-dacf-4980-ac45-8d39758ac1cd-kube-api-access-qn5hr\") on node \"crc\" DevicePath \"\"" Dec 02 12:08:49 crc kubenswrapper[4721]: I1202 12:08:49.378567 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/317859f7-dacf-4980-ac45-8d39758ac1cd-util\") on node \"crc\" DevicePath \"\"" Dec 02 12:08:49 crc kubenswrapper[4721]: I1202 12:08:49.995479 4721 generic.go:334] "Generic (PLEG): container finished" podID="cbe54636-ef0a-4d0d-bcbd-1990e8433d04" containerID="015794c7a59acb77419f4cb9303b7c8104a1cffc470528cbb0728af7ce93d1bb" exitCode=0 Dec 02 12:08:49 crc kubenswrapper[4721]: I1202 12:08:49.995574 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" event={"ID":"cbe54636-ef0a-4d0d-bcbd-1990e8433d04","Type":"ContainerDied","Data":"015794c7a59acb77419f4cb9303b7c8104a1cffc470528cbb0728af7ce93d1bb"} Dec 02 12:08:49 crc kubenswrapper[4721]: I1202 12:08:49.998506 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" event={"ID":"317859f7-dacf-4980-ac45-8d39758ac1cd","Type":"ContainerDied","Data":"cd716e848de2e9e75ecc563d51f6f716619ee2d1cffcb0cfa8e329d6fa002cd0"} Dec 02 12:08:49 crc kubenswrapper[4721]: I1202 12:08:49.998580 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd716e848de2e9e75ecc563d51f6f716619ee2d1cffcb0cfa8e329d6fa002cd0" Dec 02 12:08:49 crc kubenswrapper[4721]: I1202 12:08:49.998609 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf" Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.287937 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.302910 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-scripts\") pod \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.303006 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-config-data\") pod \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.303034 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-fernet-keys\") pod \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.304953 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-credential-keys\") pod \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.305037 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2lwq\" (UniqueName: \"kubernetes.io/projected/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-kube-api-access-v2lwq\") pod \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\" (UID: \"cbe54636-ef0a-4d0d-bcbd-1990e8433d04\") " Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.309465 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "cbe54636-ef0a-4d0d-bcbd-1990e8433d04" (UID: "cbe54636-ef0a-4d0d-bcbd-1990e8433d04"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.310008 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "cbe54636-ef0a-4d0d-bcbd-1990e8433d04" (UID: "cbe54636-ef0a-4d0d-bcbd-1990e8433d04"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.312569 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-scripts" (OuterVolumeSpecName: "scripts") pod "cbe54636-ef0a-4d0d-bcbd-1990e8433d04" (UID: "cbe54636-ef0a-4d0d-bcbd-1990e8433d04"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.323631 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-config-data" (OuterVolumeSpecName: "config-data") pod "cbe54636-ef0a-4d0d-bcbd-1990e8433d04" (UID: "cbe54636-ef0a-4d0d-bcbd-1990e8433d04"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.325791 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-kube-api-access-v2lwq" (OuterVolumeSpecName: "kube-api-access-v2lwq") pod "cbe54636-ef0a-4d0d-bcbd-1990e8433d04" (UID: "cbe54636-ef0a-4d0d-bcbd-1990e8433d04"). InnerVolumeSpecName "kube-api-access-v2lwq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.406792 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2lwq\" (UniqueName: \"kubernetes.io/projected/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-kube-api-access-v2lwq\") on node \"crc\" DevicePath \"\"" Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.406831 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.406845 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.406860 4721 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 02 12:08:51 crc kubenswrapper[4721]: I1202 12:08:51.406872 4721 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cbe54636-ef0a-4d0d-bcbd-1990e8433d04-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.013503 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.013502 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-bootstrap-sbkb7" event={"ID":"cbe54636-ef0a-4d0d-bcbd-1990e8433d04","Type":"ContainerDied","Data":"80d0445cfb40346da6d4a2668428076d066df22795ef74ab5f27cdc16a99c5be"} Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.013992 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80d0445cfb40346da6d4a2668428076d066df22795ef74ab5f27cdc16a99c5be" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.088841 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone-78b88b8749-htdrf"] Dec 02 12:08:52 crc kubenswrapper[4721]: E1202 12:08:52.089092 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="317859f7-dacf-4980-ac45-8d39758ac1cd" containerName="pull" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.089104 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="317859f7-dacf-4980-ac45-8d39758ac1cd" containerName="pull" Dec 02 12:08:52 crc kubenswrapper[4721]: E1202 12:08:52.089119 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="317859f7-dacf-4980-ac45-8d39758ac1cd" containerName="util" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.089125 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="317859f7-dacf-4980-ac45-8d39758ac1cd" containerName="util" Dec 02 12:08:52 crc kubenswrapper[4721]: E1202 12:08:52.089138 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbe54636-ef0a-4d0d-bcbd-1990e8433d04" containerName="keystone-bootstrap" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.089145 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbe54636-ef0a-4d0d-bcbd-1990e8433d04" containerName="keystone-bootstrap" Dec 02 12:08:52 crc kubenswrapper[4721]: E1202 12:08:52.089152 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="317859f7-dacf-4980-ac45-8d39758ac1cd" containerName="extract" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.089158 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="317859f7-dacf-4980-ac45-8d39758ac1cd" containerName="extract" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.089255 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="317859f7-dacf-4980-ac45-8d39758ac1cd" containerName="extract" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.089271 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbe54636-ef0a-4d0d-bcbd-1990e8433d04" containerName="keystone-bootstrap" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.089694 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.095023 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-scripts" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.095309 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.095544 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-keystone-dockercfg-rpvw9" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.095750 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"keystone-config-data" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.153522 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-78b88b8749-htdrf"] Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.222068 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-fernet-keys\") pod \"keystone-78b88b8749-htdrf\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.222147 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-credential-keys\") pod \"keystone-78b88b8749-htdrf\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.222170 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-config-data\") pod \"keystone-78b88b8749-htdrf\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.222191 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbqfs\" (UniqueName: \"kubernetes.io/projected/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-kube-api-access-nbqfs\") pod \"keystone-78b88b8749-htdrf\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.222284 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-scripts\") pod \"keystone-78b88b8749-htdrf\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.324106 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbqfs\" (UniqueName: \"kubernetes.io/projected/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-kube-api-access-nbqfs\") pod \"keystone-78b88b8749-htdrf\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.324182 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-scripts\") pod \"keystone-78b88b8749-htdrf\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.324257 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-fernet-keys\") pod \"keystone-78b88b8749-htdrf\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.324323 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-credential-keys\") pod \"keystone-78b88b8749-htdrf\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.324370 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-config-data\") pod \"keystone-78b88b8749-htdrf\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.329908 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-scripts\") pod \"keystone-78b88b8749-htdrf\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.330427 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-credential-keys\") pod \"keystone-78b88b8749-htdrf\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.331026 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-config-data\") pod \"keystone-78b88b8749-htdrf\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.331533 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-fernet-keys\") pod \"keystone-78b88b8749-htdrf\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.345536 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbqfs\" (UniqueName: \"kubernetes.io/projected/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-kube-api-access-nbqfs\") pod \"keystone-78b88b8749-htdrf\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.435829 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:52 crc kubenswrapper[4721]: I1202 12:08:52.862418 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone-78b88b8749-htdrf"] Dec 02 12:08:53 crc kubenswrapper[4721]: I1202 12:08:53.022588 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" event={"ID":"605042d5-6e07-4037-8a8c-0ca3da0d2ac3","Type":"ContainerStarted","Data":"e919556c6b839b7be54606fc02688391443d37428cca6d7e107bca50c848a403"} Dec 02 12:08:53 crc kubenswrapper[4721]: I1202 12:08:53.022921 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:08:53 crc kubenswrapper[4721]: I1202 12:08:53.022932 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" event={"ID":"605042d5-6e07-4037-8a8c-0ca3da0d2ac3","Type":"ContainerStarted","Data":"865e06bbc0fec8475d3e53dde884477cd9e7ec52ba7274a7f457f633d2602b7c"} Dec 02 12:08:53 crc kubenswrapper[4721]: I1202 12:08:53.049704 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" podStartSLOduration=1.049681945 podStartE2EDuration="1.049681945s" podCreationTimestamp="2025-12-02 12:08:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:08:53.038209182 +0000 UTC m=+1062.776759541" watchObservedRunningTime="2025-12-02 12:08:53.049681945 +0000 UTC m=+1062.788232314" Dec 02 12:09:01 crc kubenswrapper[4721]: I1202 12:09:01.678841 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh"] Dec 02 12:09:01 crc kubenswrapper[4721]: I1202 12:09:01.680388 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" Dec 02 12:09:01 crc kubenswrapper[4721]: I1202 12:09:01.683242 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-4rmgh" Dec 02 12:09:01 crc kubenswrapper[4721]: I1202 12:09:01.684438 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-service-cert" Dec 02 12:09:01 crc kubenswrapper[4721]: I1202 12:09:01.699927 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh"] Dec 02 12:09:01 crc kubenswrapper[4721]: I1202 12:09:01.765819 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmqfz\" (UniqueName: \"kubernetes.io/projected/f9214199-fedd-4dd1-8785-aab679b0178b-kube-api-access-qmqfz\") pod \"barbican-operator-controller-manager-7cf47c6f5c-rw8nh\" (UID: \"f9214199-fedd-4dd1-8785-aab679b0178b\") " pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" Dec 02 12:09:01 crc kubenswrapper[4721]: I1202 12:09:01.765933 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f9214199-fedd-4dd1-8785-aab679b0178b-apiservice-cert\") pod \"barbican-operator-controller-manager-7cf47c6f5c-rw8nh\" (UID: \"f9214199-fedd-4dd1-8785-aab679b0178b\") " pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" Dec 02 12:09:01 crc kubenswrapper[4721]: I1202 12:09:01.765976 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f9214199-fedd-4dd1-8785-aab679b0178b-webhook-cert\") pod \"barbican-operator-controller-manager-7cf47c6f5c-rw8nh\" (UID: \"f9214199-fedd-4dd1-8785-aab679b0178b\") " pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" Dec 02 12:09:01 crc kubenswrapper[4721]: I1202 12:09:01.866981 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f9214199-fedd-4dd1-8785-aab679b0178b-webhook-cert\") pod \"barbican-operator-controller-manager-7cf47c6f5c-rw8nh\" (UID: \"f9214199-fedd-4dd1-8785-aab679b0178b\") " pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" Dec 02 12:09:01 crc kubenswrapper[4721]: I1202 12:09:01.867064 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmqfz\" (UniqueName: \"kubernetes.io/projected/f9214199-fedd-4dd1-8785-aab679b0178b-kube-api-access-qmqfz\") pod \"barbican-operator-controller-manager-7cf47c6f5c-rw8nh\" (UID: \"f9214199-fedd-4dd1-8785-aab679b0178b\") " pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" Dec 02 12:09:01 crc kubenswrapper[4721]: I1202 12:09:01.867181 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f9214199-fedd-4dd1-8785-aab679b0178b-apiservice-cert\") pod \"barbican-operator-controller-manager-7cf47c6f5c-rw8nh\" (UID: \"f9214199-fedd-4dd1-8785-aab679b0178b\") " pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" Dec 02 12:09:01 crc kubenswrapper[4721]: I1202 12:09:01.872871 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f9214199-fedd-4dd1-8785-aab679b0178b-apiservice-cert\") pod \"barbican-operator-controller-manager-7cf47c6f5c-rw8nh\" (UID: \"f9214199-fedd-4dd1-8785-aab679b0178b\") " pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" Dec 02 12:09:01 crc kubenswrapper[4721]: I1202 12:09:01.873687 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f9214199-fedd-4dd1-8785-aab679b0178b-webhook-cert\") pod \"barbican-operator-controller-manager-7cf47c6f5c-rw8nh\" (UID: \"f9214199-fedd-4dd1-8785-aab679b0178b\") " pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" Dec 02 12:09:01 crc kubenswrapper[4721]: I1202 12:09:01.884908 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmqfz\" (UniqueName: \"kubernetes.io/projected/f9214199-fedd-4dd1-8785-aab679b0178b-kube-api-access-qmqfz\") pod \"barbican-operator-controller-manager-7cf47c6f5c-rw8nh\" (UID: \"f9214199-fedd-4dd1-8785-aab679b0178b\") " pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" Dec 02 12:09:02 crc kubenswrapper[4721]: I1202 12:09:02.003233 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" Dec 02 12:09:02 crc kubenswrapper[4721]: I1202 12:09:02.437010 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh"] Dec 02 12:09:02 crc kubenswrapper[4721]: W1202 12:09:02.445978 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9214199_fedd_4dd1_8785_aab679b0178b.slice/crio-41f2c78ebb95dcb6d17375ab6bd2cee4dc3ee4e9fd263822b1a858c550671db0 WatchSource:0}: Error finding container 41f2c78ebb95dcb6d17375ab6bd2cee4dc3ee4e9fd263822b1a858c550671db0: Status 404 returned error can't find the container with id 41f2c78ebb95dcb6d17375ab6bd2cee4dc3ee4e9fd263822b1a858c550671db0 Dec 02 12:09:03 crc kubenswrapper[4721]: I1202 12:09:03.084467 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" event={"ID":"f9214199-fedd-4dd1-8785-aab679b0178b","Type":"ContainerStarted","Data":"41f2c78ebb95dcb6d17375ab6bd2cee4dc3ee4e9fd263822b1a858c550671db0"} Dec 02 12:09:06 crc kubenswrapper[4721]: I1202 12:09:06.104217 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" event={"ID":"f9214199-fedd-4dd1-8785-aab679b0178b","Type":"ContainerStarted","Data":"c97a160f62600a18331f406fb788af3f4416d91e53515e01cd8a2bd6f5f86ffc"} Dec 02 12:09:06 crc kubenswrapper[4721]: I1202 12:09:06.104792 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" Dec 02 12:09:06 crc kubenswrapper[4721]: I1202 12:09:06.128044 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" podStartSLOduration=1.976002872 podStartE2EDuration="5.128018917s" podCreationTimestamp="2025-12-02 12:09:01 +0000 UTC" firstStartedPulling="2025-12-02 12:09:02.44785384 +0000 UTC m=+1072.186404189" 
lastFinishedPulling="2025-12-02 12:09:05.599869895 +0000 UTC m=+1075.338420234" observedRunningTime="2025-12-02 12:09:06.118929817 +0000 UTC m=+1075.857480206" watchObservedRunningTime="2025-12-02 12:09:06.128018917 +0000 UTC m=+1075.866569296" Dec 02 12:09:12 crc kubenswrapper[4721]: I1202 12:09:12.008893 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" Dec 02 12:09:16 crc kubenswrapper[4721]: I1202 12:09:16.165509 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-index-9bgr4"] Dec 02 12:09:16 crc kubenswrapper[4721]: I1202 12:09:16.167022 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-9bgr4" Dec 02 12:09:16 crc kubenswrapper[4721]: I1202 12:09:16.176423 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-9bgr4"] Dec 02 12:09:16 crc kubenswrapper[4721]: I1202 12:09:16.178089 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-index-dockercfg-67cbg" Dec 02 12:09:16 crc kubenswrapper[4721]: I1202 12:09:16.310620 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82b48\" (UniqueName: \"kubernetes.io/projected/59def679-be3c-4abf-bb03-48f78150e46b-kube-api-access-82b48\") pod \"swift-operator-index-9bgr4\" (UID: \"59def679-be3c-4abf-bb03-48f78150e46b\") " pod="openstack-operators/swift-operator-index-9bgr4" Dec 02 12:09:16 crc kubenswrapper[4721]: I1202 12:09:16.412561 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82b48\" (UniqueName: \"kubernetes.io/projected/59def679-be3c-4abf-bb03-48f78150e46b-kube-api-access-82b48\") pod \"swift-operator-index-9bgr4\" (UID: \"59def679-be3c-4abf-bb03-48f78150e46b\") " pod="openstack-operators/swift-operator-index-9bgr4" Dec 02 12:09:16 crc kubenswrapper[4721]: I1202 12:09:16.437493 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82b48\" (UniqueName: \"kubernetes.io/projected/59def679-be3c-4abf-bb03-48f78150e46b-kube-api-access-82b48\") pod \"swift-operator-index-9bgr4\" (UID: \"59def679-be3c-4abf-bb03-48f78150e46b\") " pod="openstack-operators/swift-operator-index-9bgr4" Dec 02 12:09:16 crc kubenswrapper[4721]: I1202 12:09:16.502152 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-index-9bgr4" Dec 02 12:09:16 crc kubenswrapper[4721]: I1202 12:09:16.931359 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-9bgr4"] Dec 02 12:09:16 crc kubenswrapper[4721]: W1202 12:09:16.934947 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59def679_be3c_4abf_bb03_48f78150e46b.slice/crio-52683a61a44ca9059261cf82fe8a35964d037c928a90e8078e082e1e1037a41e WatchSource:0}: Error finding container 52683a61a44ca9059261cf82fe8a35964d037c928a90e8078e082e1e1037a41e: Status 404 returned error can't find the container with id 52683a61a44ca9059261cf82fe8a35964d037c928a90e8078e082e1e1037a41e Dec 02 12:09:17 crc kubenswrapper[4721]: I1202 12:09:17.191474 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-9bgr4" event={"ID":"59def679-be3c-4abf-bb03-48f78150e46b","Type":"ContainerStarted","Data":"52683a61a44ca9059261cf82fe8a35964d037c928a90e8078e082e1e1037a41e"} Dec 02 12:09:19 crc kubenswrapper[4721]: I1202 12:09:19.217937 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-9bgr4" event={"ID":"59def679-be3c-4abf-bb03-48f78150e46b","Type":"ContainerStarted","Data":"fe4d517355b54bfdc7b108c74d18a214493fc80c42ae77f90a6ebaec5c15759e"} Dec 02 12:09:19 crc kubenswrapper[4721]: I1202 12:09:19.238498 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-index-9bgr4" podStartSLOduration=1.5442316539999998 podStartE2EDuration="3.238471717s" podCreationTimestamp="2025-12-02 12:09:16 +0000 UTC" firstStartedPulling="2025-12-02 12:09:16.936902476 +0000 UTC m=+1086.675452825" lastFinishedPulling="2025-12-02 12:09:18.631142539 +0000 UTC m=+1088.369692888" observedRunningTime="2025-12-02 12:09:19.229360296 +0000 UTC m=+1088.967910675" watchObservedRunningTime="2025-12-02 12:09:19.238471717 +0000 UTC m=+1088.977022096" Dec 02 12:09:20 crc kubenswrapper[4721]: I1202 12:09:20.567322 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-index-9bgr4"] Dec 02 12:09:21 crc kubenswrapper[4721]: I1202 12:09:21.166480 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-index-rrq54"] Dec 02 12:09:21 crc kubenswrapper[4721]: I1202 12:09:21.167447 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-index-rrq54" Dec 02 12:09:21 crc kubenswrapper[4721]: I1202 12:09:21.178005 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-rrq54"] Dec 02 12:09:21 crc kubenswrapper[4721]: I1202 12:09:21.231027 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/swift-operator-index-9bgr4" podUID="59def679-be3c-4abf-bb03-48f78150e46b" containerName="registry-server" containerID="cri-o://fe4d517355b54bfdc7b108c74d18a214493fc80c42ae77f90a6ebaec5c15759e" gracePeriod=2 Dec 02 12:09:21 crc kubenswrapper[4721]: I1202 12:09:21.276406 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rh6cs\" (UniqueName: \"kubernetes.io/projected/94c0083a-0c9e-43fc-9ad3-46517e9b4adb-kube-api-access-rh6cs\") pod \"swift-operator-index-rrq54\" (UID: \"94c0083a-0c9e-43fc-9ad3-46517e9b4adb\") " pod="openstack-operators/swift-operator-index-rrq54" Dec 02 12:09:21 crc kubenswrapper[4721]: I1202 12:09:21.378092 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rh6cs\" (UniqueName: \"kubernetes.io/projected/94c0083a-0c9e-43fc-9ad3-46517e9b4adb-kube-api-access-rh6cs\") pod \"swift-operator-index-rrq54\" (UID: \"94c0083a-0c9e-43fc-9ad3-46517e9b4adb\") " pod="openstack-operators/swift-operator-index-rrq54" Dec 02 12:09:21 crc kubenswrapper[4721]: I1202 12:09:21.396987 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rh6cs\" (UniqueName: \"kubernetes.io/projected/94c0083a-0c9e-43fc-9ad3-46517e9b4adb-kube-api-access-rh6cs\") pod \"swift-operator-index-rrq54\" (UID: \"94c0083a-0c9e-43fc-9ad3-46517e9b4adb\") " pod="openstack-operators/swift-operator-index-rrq54" Dec 02 12:09:21 crc kubenswrapper[4721]: I1202 12:09:21.513732 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-rrq54" Dec 02 12:09:21 crc kubenswrapper[4721]: I1202 12:09:21.632164 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-9bgr4" Dec 02 12:09:21 crc kubenswrapper[4721]: I1202 12:09:21.783552 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82b48\" (UniqueName: \"kubernetes.io/projected/59def679-be3c-4abf-bb03-48f78150e46b-kube-api-access-82b48\") pod \"59def679-be3c-4abf-bb03-48f78150e46b\" (UID: \"59def679-be3c-4abf-bb03-48f78150e46b\") " Dec 02 12:09:21 crc kubenswrapper[4721]: I1202 12:09:21.788205 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59def679-be3c-4abf-bb03-48f78150e46b-kube-api-access-82b48" (OuterVolumeSpecName: "kube-api-access-82b48") pod "59def679-be3c-4abf-bb03-48f78150e46b" (UID: "59def679-be3c-4abf-bb03-48f78150e46b"). InnerVolumeSpecName "kube-api-access-82b48". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:09:21 crc kubenswrapper[4721]: I1202 12:09:21.884890 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82b48\" (UniqueName: \"kubernetes.io/projected/59def679-be3c-4abf-bb03-48f78150e46b-kube-api-access-82b48\") on node \"crc\" DevicePath \"\"" Dec 02 12:09:21 crc kubenswrapper[4721]: I1202 12:09:21.930957 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-rrq54"] Dec 02 12:09:21 crc kubenswrapper[4721]: W1202 12:09:21.934987 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94c0083a_0c9e_43fc_9ad3_46517e9b4adb.slice/crio-d93c57e85b1009a41e62863acb39fd4f4f1887e1629cbb56b8c66b2d96168cef WatchSource:0}: Error finding container d93c57e85b1009a41e62863acb39fd4f4f1887e1629cbb56b8c66b2d96168cef: Status 404 returned error can't find the container with id d93c57e85b1009a41e62863acb39fd4f4f1887e1629cbb56b8c66b2d96168cef Dec 02 12:09:22 crc kubenswrapper[4721]: I1202 12:09:22.239058 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-rrq54" event={"ID":"94c0083a-0c9e-43fc-9ad3-46517e9b4adb","Type":"ContainerStarted","Data":"5bebcd28f5e6ce63bc3e97ea20dfc561b0d551275460afaaba1c334b1504622d"} Dec 02 12:09:22 crc kubenswrapper[4721]: I1202 12:09:22.239442 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-rrq54" event={"ID":"94c0083a-0c9e-43fc-9ad3-46517e9b4adb","Type":"ContainerStarted","Data":"d93c57e85b1009a41e62863acb39fd4f4f1887e1629cbb56b8c66b2d96168cef"} Dec 02 12:09:22 crc kubenswrapper[4721]: I1202 12:09:22.263016 4721 generic.go:334] "Generic (PLEG): container finished" podID="59def679-be3c-4abf-bb03-48f78150e46b" containerID="fe4d517355b54bfdc7b108c74d18a214493fc80c42ae77f90a6ebaec5c15759e" exitCode=0 Dec 02 12:09:22 crc kubenswrapper[4721]: I1202 12:09:22.263073 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-9bgr4" event={"ID":"59def679-be3c-4abf-bb03-48f78150e46b","Type":"ContainerDied","Data":"fe4d517355b54bfdc7b108c74d18a214493fc80c42ae77f90a6ebaec5c15759e"} Dec 02 12:09:22 crc kubenswrapper[4721]: I1202 12:09:22.263102 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-9bgr4" event={"ID":"59def679-be3c-4abf-bb03-48f78150e46b","Type":"ContainerDied","Data":"52683a61a44ca9059261cf82fe8a35964d037c928a90e8078e082e1e1037a41e"} Dec 02 12:09:22 crc kubenswrapper[4721]: I1202 12:09:22.263117 4721 scope.go:117] "RemoveContainer" containerID="fe4d517355b54bfdc7b108c74d18a214493fc80c42ae77f90a6ebaec5c15759e" Dec 02 12:09:22 crc kubenswrapper[4721]: I1202 12:09:22.263280 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-index-9bgr4" Dec 02 12:09:22 crc kubenswrapper[4721]: I1202 12:09:22.266328 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-index-rrq54" podStartSLOduration=1.202935525 podStartE2EDuration="1.266303253s" podCreationTimestamp="2025-12-02 12:09:21 +0000 UTC" firstStartedPulling="2025-12-02 12:09:21.937984936 +0000 UTC m=+1091.676535285" lastFinishedPulling="2025-12-02 12:09:22.001352674 +0000 UTC m=+1091.739903013" observedRunningTime="2025-12-02 12:09:22.26198696 +0000 UTC m=+1092.000537329" watchObservedRunningTime="2025-12-02 12:09:22.266303253 +0000 UTC m=+1092.004853602" Dec 02 12:09:22 crc kubenswrapper[4721]: I1202 12:09:22.322071 4721 scope.go:117] "RemoveContainer" containerID="fe4d517355b54bfdc7b108c74d18a214493fc80c42ae77f90a6ebaec5c15759e" Dec 02 12:09:22 crc kubenswrapper[4721]: E1202 12:09:22.322477 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe4d517355b54bfdc7b108c74d18a214493fc80c42ae77f90a6ebaec5c15759e\": container with ID starting with fe4d517355b54bfdc7b108c74d18a214493fc80c42ae77f90a6ebaec5c15759e not found: ID does not exist" containerID="fe4d517355b54bfdc7b108c74d18a214493fc80c42ae77f90a6ebaec5c15759e" Dec 02 12:09:22 crc kubenswrapper[4721]: I1202 12:09:22.322503 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe4d517355b54bfdc7b108c74d18a214493fc80c42ae77f90a6ebaec5c15759e"} err="failed to get container status \"fe4d517355b54bfdc7b108c74d18a214493fc80c42ae77f90a6ebaec5c15759e\": rpc error: code = NotFound desc = could not find container \"fe4d517355b54bfdc7b108c74d18a214493fc80c42ae77f90a6ebaec5c15759e\": container with ID starting with fe4d517355b54bfdc7b108c74d18a214493fc80c42ae77f90a6ebaec5c15759e not found: ID does not exist" Dec 02 12:09:22 crc kubenswrapper[4721]: I1202 12:09:22.323629 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-index-9bgr4"] Dec 02 12:09:22 crc kubenswrapper[4721]: I1202 12:09:22.330800 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/swift-operator-index-9bgr4"] Dec 02 12:09:22 crc kubenswrapper[4721]: I1202 12:09:22.846511 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59def679-be3c-4abf-bb03-48f78150e46b" path="/var/lib/kubelet/pods/59def679-be3c-4abf-bb03-48f78150e46b/volumes" Dec 02 12:09:23 crc kubenswrapper[4721]: I1202 12:09:23.869679 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:09:31 crc kubenswrapper[4721]: I1202 12:09:31.514800 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/swift-operator-index-rrq54" Dec 02 12:09:31 crc kubenswrapper[4721]: I1202 12:09:31.515559 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-index-rrq54" Dec 02 12:09:31 crc kubenswrapper[4721]: I1202 12:09:31.544762 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/swift-operator-index-rrq54" Dec 02 12:09:32 crc kubenswrapper[4721]: I1202 12:09:32.382751 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-index-rrq54" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.411039 4721 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb"] Dec 02 12:09:33 crc kubenswrapper[4721]: E1202 12:09:33.412090 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59def679-be3c-4abf-bb03-48f78150e46b" containerName="registry-server" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.412168 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="59def679-be3c-4abf-bb03-48f78150e46b" containerName="registry-server" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.412335 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="59def679-be3c-4abf-bb03-48f78150e46b" containerName="registry-server" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.413255 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.415364 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-6fpcc" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.423729 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb"] Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.463824 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-bundle\") pod \"f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb\" (UID: \"6ef97fe9-ccd2-4131-80f8-8ef651bd503e\") " pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.463879 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-util\") pod \"f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb\" (UID: \"6ef97fe9-ccd2-4131-80f8-8ef651bd503e\") " pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.464031 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jsjv\" (UniqueName: \"kubernetes.io/projected/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-kube-api-access-7jsjv\") pod \"f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb\" (UID: \"6ef97fe9-ccd2-4131-80f8-8ef651bd503e\") " pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.567037 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jsjv\" (UniqueName: \"kubernetes.io/projected/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-kube-api-access-7jsjv\") pod \"f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb\" (UID: \"6ef97fe9-ccd2-4131-80f8-8ef651bd503e\") " pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.567129 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-bundle\") pod 
\"f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb\" (UID: \"6ef97fe9-ccd2-4131-80f8-8ef651bd503e\") " pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.567181 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-util\") pod \"f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb\" (UID: \"6ef97fe9-ccd2-4131-80f8-8ef651bd503e\") " pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.567754 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-bundle\") pod \"f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb\" (UID: \"6ef97fe9-ccd2-4131-80f8-8ef651bd503e\") " pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.567805 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-util\") pod \"f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb\" (UID: \"6ef97fe9-ccd2-4131-80f8-8ef651bd503e\") " pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.584521 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jsjv\" (UniqueName: \"kubernetes.io/projected/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-kube-api-access-7jsjv\") pod \"f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb\" (UID: \"6ef97fe9-ccd2-4131-80f8-8ef651bd503e\") " pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.732505 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.863737 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-db-create-84xxm"] Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.864483 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-create-84xxm" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.876479 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-db-create-84xxm"] Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.946907 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-958c-account-create-update-hm5b5"] Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.953671 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-958c-account-create-update-hm5b5"] Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.953782 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-958c-account-create-update-hm5b5" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.956264 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-db-secret" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.976946 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0da604f8-2b6f-40a1-82ca-27842e0bb72e-operator-scripts\") pod \"barbican-db-create-84xxm\" (UID: \"0da604f8-2b6f-40a1-82ca-27842e0bb72e\") " pod="swift-kuttl-tests/barbican-db-create-84xxm" Dec 02 12:09:33 crc kubenswrapper[4721]: I1202 12:09:33.977097 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfrvb\" (UniqueName: \"kubernetes.io/projected/0da604f8-2b6f-40a1-82ca-27842e0bb72e-kube-api-access-lfrvb\") pod \"barbican-db-create-84xxm\" (UID: \"0da604f8-2b6f-40a1-82ca-27842e0bb72e\") " pod="swift-kuttl-tests/barbican-db-create-84xxm" Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.078305 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b4k7\" (UniqueName: \"kubernetes.io/projected/a721153e-8340-4695-8dfa-7c47d0f9ca66-kube-api-access-8b4k7\") pod \"barbican-958c-account-create-update-hm5b5\" (UID: \"a721153e-8340-4695-8dfa-7c47d0f9ca66\") " pod="swift-kuttl-tests/barbican-958c-account-create-update-hm5b5" Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.078411 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0da604f8-2b6f-40a1-82ca-27842e0bb72e-operator-scripts\") pod \"barbican-db-create-84xxm\" (UID: \"0da604f8-2b6f-40a1-82ca-27842e0bb72e\") " pod="swift-kuttl-tests/barbican-db-create-84xxm" Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.078469 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfrvb\" (UniqueName: \"kubernetes.io/projected/0da604f8-2b6f-40a1-82ca-27842e0bb72e-kube-api-access-lfrvb\") pod \"barbican-db-create-84xxm\" (UID: \"0da604f8-2b6f-40a1-82ca-27842e0bb72e\") " pod="swift-kuttl-tests/barbican-db-create-84xxm" Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.078580 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a721153e-8340-4695-8dfa-7c47d0f9ca66-operator-scripts\") pod \"barbican-958c-account-create-update-hm5b5\" (UID: \"a721153e-8340-4695-8dfa-7c47d0f9ca66\") " pod="swift-kuttl-tests/barbican-958c-account-create-update-hm5b5" Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.079601 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0da604f8-2b6f-40a1-82ca-27842e0bb72e-operator-scripts\") pod \"barbican-db-create-84xxm\" (UID: \"0da604f8-2b6f-40a1-82ca-27842e0bb72e\") " pod="swift-kuttl-tests/barbican-db-create-84xxm" Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.095731 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfrvb\" (UniqueName: \"kubernetes.io/projected/0da604f8-2b6f-40a1-82ca-27842e0bb72e-kube-api-access-lfrvb\") pod \"barbican-db-create-84xxm\" (UID: \"0da604f8-2b6f-40a1-82ca-27842e0bb72e\") " 
pod="swift-kuttl-tests/barbican-db-create-84xxm" Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.203209 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-create-84xxm" Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.204100 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a721153e-8340-4695-8dfa-7c47d0f9ca66-operator-scripts\") pod \"barbican-958c-account-create-update-hm5b5\" (UID: \"a721153e-8340-4695-8dfa-7c47d0f9ca66\") " pod="swift-kuttl-tests/barbican-958c-account-create-update-hm5b5" Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.204180 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b4k7\" (UniqueName: \"kubernetes.io/projected/a721153e-8340-4695-8dfa-7c47d0f9ca66-kube-api-access-8b4k7\") pod \"barbican-958c-account-create-update-hm5b5\" (UID: \"a721153e-8340-4695-8dfa-7c47d0f9ca66\") " pod="swift-kuttl-tests/barbican-958c-account-create-update-hm5b5" Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.205325 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a721153e-8340-4695-8dfa-7c47d0f9ca66-operator-scripts\") pod \"barbican-958c-account-create-update-hm5b5\" (UID: \"a721153e-8340-4695-8dfa-7c47d0f9ca66\") " pod="swift-kuttl-tests/barbican-958c-account-create-update-hm5b5" Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.223521 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b4k7\" (UniqueName: \"kubernetes.io/projected/a721153e-8340-4695-8dfa-7c47d0f9ca66-kube-api-access-8b4k7\") pod \"barbican-958c-account-create-update-hm5b5\" (UID: \"a721153e-8340-4695-8dfa-7c47d0f9ca66\") " pod="swift-kuttl-tests/barbican-958c-account-create-update-hm5b5" Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.244083 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb"] Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.275127 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-958c-account-create-update-hm5b5" Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.369318 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" event={"ID":"6ef97fe9-ccd2-4131-80f8-8ef651bd503e","Type":"ContainerStarted","Data":"ec31d8b740d6a338dab8c61c5250690c6ebfe122337b6b12dfa7414542dbd6cf"} Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.416641 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-db-create-84xxm"] Dec 02 12:09:34 crc kubenswrapper[4721]: W1202 12:09:34.426565 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0da604f8_2b6f_40a1_82ca_27842e0bb72e.slice/crio-6267c38b165b8f8f3f250e36368812bab7e9f64561e142d6adfebe56af0b49d0 WatchSource:0}: Error finding container 6267c38b165b8f8f3f250e36368812bab7e9f64561e142d6adfebe56af0b49d0: Status 404 returned error can't find the container with id 6267c38b165b8f8f3f250e36368812bab7e9f64561e142d6adfebe56af0b49d0 Dec 02 12:09:34 crc kubenswrapper[4721]: I1202 12:09:34.531807 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-958c-account-create-update-hm5b5"] Dec 02 12:09:34 crc kubenswrapper[4721]: W1202 12:09:34.534825 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda721153e_8340_4695_8dfa_7c47d0f9ca66.slice/crio-be3684c92d6df87a7dabcd168af74c251e11ba4e8e7967db7a6023f04b99abb9 WatchSource:0}: Error finding container be3684c92d6df87a7dabcd168af74c251e11ba4e8e7967db7a6023f04b99abb9: Status 404 returned error can't find the container with id be3684c92d6df87a7dabcd168af74c251e11ba4e8e7967db7a6023f04b99abb9 Dec 02 12:09:35 crc kubenswrapper[4721]: I1202 12:09:35.378562 4721 generic.go:334] "Generic (PLEG): container finished" podID="a721153e-8340-4695-8dfa-7c47d0f9ca66" containerID="a650ef7ce606582ec00ad4221f0497a8fdce279b33223a7af7d20baafc76f5b0" exitCode=0 Dec 02 12:09:35 crc kubenswrapper[4721]: I1202 12:09:35.378633 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-958c-account-create-update-hm5b5" event={"ID":"a721153e-8340-4695-8dfa-7c47d0f9ca66","Type":"ContainerDied","Data":"a650ef7ce606582ec00ad4221f0497a8fdce279b33223a7af7d20baafc76f5b0"} Dec 02 12:09:35 crc kubenswrapper[4721]: I1202 12:09:35.378665 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-958c-account-create-update-hm5b5" event={"ID":"a721153e-8340-4695-8dfa-7c47d0f9ca66","Type":"ContainerStarted","Data":"be3684c92d6df87a7dabcd168af74c251e11ba4e8e7967db7a6023f04b99abb9"} Dec 02 12:09:35 crc kubenswrapper[4721]: I1202 12:09:35.380232 4721 generic.go:334] "Generic (PLEG): container finished" podID="6ef97fe9-ccd2-4131-80f8-8ef651bd503e" containerID="6a414ffa8cfba24ff794f2c3eb9a2556a18ebd835959ebf730eba0362661e927" exitCode=0 Dec 02 12:09:35 crc kubenswrapper[4721]: I1202 12:09:35.380278 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" event={"ID":"6ef97fe9-ccd2-4131-80f8-8ef651bd503e","Type":"ContainerDied","Data":"6a414ffa8cfba24ff794f2c3eb9a2556a18ebd835959ebf730eba0362661e927"} Dec 02 12:09:35 crc kubenswrapper[4721]: I1202 12:09:35.384035 4721 generic.go:334] "Generic (PLEG): container finished" 
podID="0da604f8-2b6f-40a1-82ca-27842e0bb72e" containerID="2eb528eb57613938286320dc883dc05bef02c820daaff9518effa88078a359e1" exitCode=0 Dec 02 12:09:35 crc kubenswrapper[4721]: I1202 12:09:35.384081 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-create-84xxm" event={"ID":"0da604f8-2b6f-40a1-82ca-27842e0bb72e","Type":"ContainerDied","Data":"2eb528eb57613938286320dc883dc05bef02c820daaff9518effa88078a359e1"} Dec 02 12:09:35 crc kubenswrapper[4721]: I1202 12:09:35.384120 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-create-84xxm" event={"ID":"0da604f8-2b6f-40a1-82ca-27842e0bb72e","Type":"ContainerStarted","Data":"6267c38b165b8f8f3f250e36368812bab7e9f64561e142d6adfebe56af0b49d0"} Dec 02 12:09:36 crc kubenswrapper[4721]: I1202 12:09:36.394285 4721 generic.go:334] "Generic (PLEG): container finished" podID="6ef97fe9-ccd2-4131-80f8-8ef651bd503e" containerID="5e4bd538fec49d6ffcab7dc5632b80a9c071dac18fde28366f23eab915fc0a58" exitCode=0 Dec 02 12:09:36 crc kubenswrapper[4721]: I1202 12:09:36.394330 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" event={"ID":"6ef97fe9-ccd2-4131-80f8-8ef651bd503e","Type":"ContainerDied","Data":"5e4bd538fec49d6ffcab7dc5632b80a9c071dac18fde28366f23eab915fc0a58"} Dec 02 12:09:36 crc kubenswrapper[4721]: I1202 12:09:36.790845 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-create-84xxm" Dec 02 12:09:36 crc kubenswrapper[4721]: I1202 12:09:36.798579 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-958c-account-create-update-hm5b5" Dec 02 12:09:36 crc kubenswrapper[4721]: I1202 12:09:36.953886 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a721153e-8340-4695-8dfa-7c47d0f9ca66-operator-scripts\") pod \"a721153e-8340-4695-8dfa-7c47d0f9ca66\" (UID: \"a721153e-8340-4695-8dfa-7c47d0f9ca66\") " Dec 02 12:09:36 crc kubenswrapper[4721]: I1202 12:09:36.953994 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0da604f8-2b6f-40a1-82ca-27842e0bb72e-operator-scripts\") pod \"0da604f8-2b6f-40a1-82ca-27842e0bb72e\" (UID: \"0da604f8-2b6f-40a1-82ca-27842e0bb72e\") " Dec 02 12:09:36 crc kubenswrapper[4721]: I1202 12:09:36.954037 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfrvb\" (UniqueName: \"kubernetes.io/projected/0da604f8-2b6f-40a1-82ca-27842e0bb72e-kube-api-access-lfrvb\") pod \"0da604f8-2b6f-40a1-82ca-27842e0bb72e\" (UID: \"0da604f8-2b6f-40a1-82ca-27842e0bb72e\") " Dec 02 12:09:36 crc kubenswrapper[4721]: I1202 12:09:36.954149 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8b4k7\" (UniqueName: \"kubernetes.io/projected/a721153e-8340-4695-8dfa-7c47d0f9ca66-kube-api-access-8b4k7\") pod \"a721153e-8340-4695-8dfa-7c47d0f9ca66\" (UID: \"a721153e-8340-4695-8dfa-7c47d0f9ca66\") " Dec 02 12:09:36 crc kubenswrapper[4721]: I1202 12:09:36.954996 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a721153e-8340-4695-8dfa-7c47d0f9ca66-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a721153e-8340-4695-8dfa-7c47d0f9ca66" (UID: 
"a721153e-8340-4695-8dfa-7c47d0f9ca66"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:09:36 crc kubenswrapper[4721]: I1202 12:09:36.959268 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0da604f8-2b6f-40a1-82ca-27842e0bb72e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0da604f8-2b6f-40a1-82ca-27842e0bb72e" (UID: "0da604f8-2b6f-40a1-82ca-27842e0bb72e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:09:36 crc kubenswrapper[4721]: I1202 12:09:36.959682 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a721153e-8340-4695-8dfa-7c47d0f9ca66-kube-api-access-8b4k7" (OuterVolumeSpecName: "kube-api-access-8b4k7") pod "a721153e-8340-4695-8dfa-7c47d0f9ca66" (UID: "a721153e-8340-4695-8dfa-7c47d0f9ca66"). InnerVolumeSpecName "kube-api-access-8b4k7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:09:36 crc kubenswrapper[4721]: I1202 12:09:36.965057 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0da604f8-2b6f-40a1-82ca-27842e0bb72e-kube-api-access-lfrvb" (OuterVolumeSpecName: "kube-api-access-lfrvb") pod "0da604f8-2b6f-40a1-82ca-27842e0bb72e" (UID: "0da604f8-2b6f-40a1-82ca-27842e0bb72e"). InnerVolumeSpecName "kube-api-access-lfrvb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:09:37 crc kubenswrapper[4721]: I1202 12:09:37.055499 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a721153e-8340-4695-8dfa-7c47d0f9ca66-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:09:37 crc kubenswrapper[4721]: I1202 12:09:37.055525 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0da604f8-2b6f-40a1-82ca-27842e0bb72e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:09:37 crc kubenswrapper[4721]: I1202 12:09:37.055537 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfrvb\" (UniqueName: \"kubernetes.io/projected/0da604f8-2b6f-40a1-82ca-27842e0bb72e-kube-api-access-lfrvb\") on node \"crc\" DevicePath \"\"" Dec 02 12:09:37 crc kubenswrapper[4721]: I1202 12:09:37.055547 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8b4k7\" (UniqueName: \"kubernetes.io/projected/a721153e-8340-4695-8dfa-7c47d0f9ca66-kube-api-access-8b4k7\") on node \"crc\" DevicePath \"\"" Dec 02 12:09:37 crc kubenswrapper[4721]: I1202 12:09:37.402190 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-958c-account-create-update-hm5b5" event={"ID":"a721153e-8340-4695-8dfa-7c47d0f9ca66","Type":"ContainerDied","Data":"be3684c92d6df87a7dabcd168af74c251e11ba4e8e7967db7a6023f04b99abb9"} Dec 02 12:09:37 crc kubenswrapper[4721]: I1202 12:09:37.402237 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be3684c92d6df87a7dabcd168af74c251e11ba4e8e7967db7a6023f04b99abb9" Dec 02 12:09:37 crc kubenswrapper[4721]: I1202 12:09:37.403324 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-958c-account-create-update-hm5b5" Dec 02 12:09:37 crc kubenswrapper[4721]: I1202 12:09:37.405007 4721 generic.go:334] "Generic (PLEG): container finished" podID="6ef97fe9-ccd2-4131-80f8-8ef651bd503e" containerID="a4098a524eb6077b7f2eba5a8cc8b53104ff0ba883f693651e5fcbe91c89ae15" exitCode=0 Dec 02 12:09:37 crc kubenswrapper[4721]: I1202 12:09:37.405066 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" event={"ID":"6ef97fe9-ccd2-4131-80f8-8ef651bd503e","Type":"ContainerDied","Data":"a4098a524eb6077b7f2eba5a8cc8b53104ff0ba883f693651e5fcbe91c89ae15"} Dec 02 12:09:37 crc kubenswrapper[4721]: I1202 12:09:37.406275 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-create-84xxm" event={"ID":"0da604f8-2b6f-40a1-82ca-27842e0bb72e","Type":"ContainerDied","Data":"6267c38b165b8f8f3f250e36368812bab7e9f64561e142d6adfebe56af0b49d0"} Dec 02 12:09:37 crc kubenswrapper[4721]: I1202 12:09:37.406299 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6267c38b165b8f8f3f250e36368812bab7e9f64561e142d6adfebe56af0b49d0" Dec 02 12:09:37 crc kubenswrapper[4721]: I1202 12:09:37.406382 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-create-84xxm" Dec 02 12:09:38 crc kubenswrapper[4721]: I1202 12:09:38.733722 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" Dec 02 12:09:38 crc kubenswrapper[4721]: I1202 12:09:38.882307 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-bundle\") pod \"6ef97fe9-ccd2-4131-80f8-8ef651bd503e\" (UID: \"6ef97fe9-ccd2-4131-80f8-8ef651bd503e\") " Dec 02 12:09:38 crc kubenswrapper[4721]: I1202 12:09:38.882477 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-util\") pod \"6ef97fe9-ccd2-4131-80f8-8ef651bd503e\" (UID: \"6ef97fe9-ccd2-4131-80f8-8ef651bd503e\") " Dec 02 12:09:38 crc kubenswrapper[4721]: I1202 12:09:38.882547 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jsjv\" (UniqueName: \"kubernetes.io/projected/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-kube-api-access-7jsjv\") pod \"6ef97fe9-ccd2-4131-80f8-8ef651bd503e\" (UID: \"6ef97fe9-ccd2-4131-80f8-8ef651bd503e\") " Dec 02 12:09:38 crc kubenswrapper[4721]: I1202 12:09:38.884890 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-bundle" (OuterVolumeSpecName: "bundle") pod "6ef97fe9-ccd2-4131-80f8-8ef651bd503e" (UID: "6ef97fe9-ccd2-4131-80f8-8ef651bd503e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:09:38 crc kubenswrapper[4721]: I1202 12:09:38.893891 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-kube-api-access-7jsjv" (OuterVolumeSpecName: "kube-api-access-7jsjv") pod "6ef97fe9-ccd2-4131-80f8-8ef651bd503e" (UID: "6ef97fe9-ccd2-4131-80f8-8ef651bd503e"). InnerVolumeSpecName "kube-api-access-7jsjv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:09:38 crc kubenswrapper[4721]: I1202 12:09:38.897894 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-util" (OuterVolumeSpecName: "util") pod "6ef97fe9-ccd2-4131-80f8-8ef651bd503e" (UID: "6ef97fe9-ccd2-4131-80f8-8ef651bd503e"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:09:38 crc kubenswrapper[4721]: I1202 12:09:38.984652 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-util\") on node \"crc\" DevicePath \"\"" Dec 02 12:09:38 crc kubenswrapper[4721]: I1202 12:09:38.984696 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jsjv\" (UniqueName: \"kubernetes.io/projected/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-kube-api-access-7jsjv\") on node \"crc\" DevicePath \"\"" Dec 02 12:09:38 crc kubenswrapper[4721]: I1202 12:09:38.984707 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6ef97fe9-ccd2-4131-80f8-8ef651bd503e-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.172238 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-db-sync-h58xl"] Dec 02 12:09:39 crc kubenswrapper[4721]: E1202 12:09:39.172514 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ef97fe9-ccd2-4131-80f8-8ef651bd503e" containerName="util" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.172531 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ef97fe9-ccd2-4131-80f8-8ef651bd503e" containerName="util" Dec 02 12:09:39 crc kubenswrapper[4721]: E1202 12:09:39.172546 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ef97fe9-ccd2-4131-80f8-8ef651bd503e" containerName="extract" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.172554 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ef97fe9-ccd2-4131-80f8-8ef651bd503e" containerName="extract" Dec 02 12:09:39 crc kubenswrapper[4721]: E1202 12:09:39.172565 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0da604f8-2b6f-40a1-82ca-27842e0bb72e" containerName="mariadb-database-create" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.172571 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="0da604f8-2b6f-40a1-82ca-27842e0bb72e" containerName="mariadb-database-create" Dec 02 12:09:39 crc kubenswrapper[4721]: E1202 12:09:39.172581 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ef97fe9-ccd2-4131-80f8-8ef651bd503e" containerName="pull" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.172587 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ef97fe9-ccd2-4131-80f8-8ef651bd503e" containerName="pull" Dec 02 12:09:39 crc kubenswrapper[4721]: E1202 12:09:39.172601 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a721153e-8340-4695-8dfa-7c47d0f9ca66" containerName="mariadb-account-create-update" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.172607 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="a721153e-8340-4695-8dfa-7c47d0f9ca66" containerName="mariadb-account-create-update" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.172720 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ef97fe9-ccd2-4131-80f8-8ef651bd503e" 
containerName="extract" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.172739 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="0da604f8-2b6f-40a1-82ca-27842e0bb72e" containerName="mariadb-database-create" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.172749 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="a721153e-8340-4695-8dfa-7c47d0f9ca66" containerName="mariadb-account-create-update" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.173158 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-sync-h58xl" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.179436 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-config-data" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.180449 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-barbican-dockercfg-b5x2g" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.185799 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-db-sync-h58xl"] Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.289483 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2tbv\" (UniqueName: \"kubernetes.io/projected/911bdab8-6e80-4fb4-8cde-4e5702b37fa2-kube-api-access-x2tbv\") pod \"barbican-db-sync-h58xl\" (UID: \"911bdab8-6e80-4fb4-8cde-4e5702b37fa2\") " pod="swift-kuttl-tests/barbican-db-sync-h58xl" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.289584 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/911bdab8-6e80-4fb4-8cde-4e5702b37fa2-db-sync-config-data\") pod \"barbican-db-sync-h58xl\" (UID: \"911bdab8-6e80-4fb4-8cde-4e5702b37fa2\") " pod="swift-kuttl-tests/barbican-db-sync-h58xl" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.391447 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/911bdab8-6e80-4fb4-8cde-4e5702b37fa2-db-sync-config-data\") pod \"barbican-db-sync-h58xl\" (UID: \"911bdab8-6e80-4fb4-8cde-4e5702b37fa2\") " pod="swift-kuttl-tests/barbican-db-sync-h58xl" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.391723 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2tbv\" (UniqueName: \"kubernetes.io/projected/911bdab8-6e80-4fb4-8cde-4e5702b37fa2-kube-api-access-x2tbv\") pod \"barbican-db-sync-h58xl\" (UID: \"911bdab8-6e80-4fb4-8cde-4e5702b37fa2\") " pod="swift-kuttl-tests/barbican-db-sync-h58xl" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.396027 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/911bdab8-6e80-4fb4-8cde-4e5702b37fa2-db-sync-config-data\") pod \"barbican-db-sync-h58xl\" (UID: \"911bdab8-6e80-4fb4-8cde-4e5702b37fa2\") " pod="swift-kuttl-tests/barbican-db-sync-h58xl" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.408057 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2tbv\" (UniqueName: \"kubernetes.io/projected/911bdab8-6e80-4fb4-8cde-4e5702b37fa2-kube-api-access-x2tbv\") pod \"barbican-db-sync-h58xl\" (UID: \"911bdab8-6e80-4fb4-8cde-4e5702b37fa2\") " 
pod="swift-kuttl-tests/barbican-db-sync-h58xl" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.422935 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" event={"ID":"6ef97fe9-ccd2-4131-80f8-8ef651bd503e","Type":"ContainerDied","Data":"ec31d8b740d6a338dab8c61c5250690c6ebfe122337b6b12dfa7414542dbd6cf"} Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.422981 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec31d8b740d6a338dab8c61c5250690c6ebfe122337b6b12dfa7414542dbd6cf" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.423050 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.488330 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-sync-h58xl" Dec 02 12:09:39 crc kubenswrapper[4721]: I1202 12:09:39.729860 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-db-sync-h58xl"] Dec 02 12:09:39 crc kubenswrapper[4721]: W1202 12:09:39.735446 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod911bdab8_6e80_4fb4_8cde_4e5702b37fa2.slice/crio-68ea833d23a3ba697f52b6bd236a65c3b8805e6c2bd556219d0751bed6d109f5 WatchSource:0}: Error finding container 68ea833d23a3ba697f52b6bd236a65c3b8805e6c2bd556219d0751bed6d109f5: Status 404 returned error can't find the container with id 68ea833d23a3ba697f52b6bd236a65c3b8805e6c2bd556219d0751bed6d109f5 Dec 02 12:09:40 crc kubenswrapper[4721]: I1202 12:09:40.432958 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-sync-h58xl" event={"ID":"911bdab8-6e80-4fb4-8cde-4e5702b37fa2","Type":"ContainerStarted","Data":"68ea833d23a3ba697f52b6bd236a65c3b8805e6c2bd556219d0751bed6d109f5"} Dec 02 12:09:45 crc kubenswrapper[4721]: I1202 12:09:45.466612 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-sync-h58xl" event={"ID":"911bdab8-6e80-4fb4-8cde-4e5702b37fa2","Type":"ContainerStarted","Data":"a6830c7ebe1d4ca0528ad5a79fce46880086b7fe0020443bcffd89f877cc8c74"} Dec 02 12:09:45 crc kubenswrapper[4721]: I1202 12:09:45.480737 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/barbican-db-sync-h58xl" podStartSLOduration=1.66309359 podStartE2EDuration="6.480719925s" podCreationTimestamp="2025-12-02 12:09:39 +0000 UTC" firstStartedPulling="2025-12-02 12:09:39.738307048 +0000 UTC m=+1109.476857397" lastFinishedPulling="2025-12-02 12:09:44.555933383 +0000 UTC m=+1114.294483732" observedRunningTime="2025-12-02 12:09:45.477253748 +0000 UTC m=+1115.215804097" watchObservedRunningTime="2025-12-02 12:09:45.480719925 +0000 UTC m=+1115.219270274" Dec 02 12:09:47 crc kubenswrapper[4721]: I1202 12:09:47.482661 4721 generic.go:334] "Generic (PLEG): container finished" podID="911bdab8-6e80-4fb4-8cde-4e5702b37fa2" containerID="a6830c7ebe1d4ca0528ad5a79fce46880086b7fe0020443bcffd89f877cc8c74" exitCode=0 Dec 02 12:09:47 crc kubenswrapper[4721]: I1202 12:09:47.482764 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-sync-h58xl" 
event={"ID":"911bdab8-6e80-4fb4-8cde-4e5702b37fa2","Type":"ContainerDied","Data":"a6830c7ebe1d4ca0528ad5a79fce46880086b7fe0020443bcffd89f877cc8c74"} Dec 02 12:09:48 crc kubenswrapper[4721]: I1202 12:09:48.868134 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-sync-h58xl" Dec 02 12:09:48 crc kubenswrapper[4721]: I1202 12:09:48.966883 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm"] Dec 02 12:09:48 crc kubenswrapper[4721]: E1202 12:09:48.967226 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="911bdab8-6e80-4fb4-8cde-4e5702b37fa2" containerName="barbican-db-sync" Dec 02 12:09:48 crc kubenswrapper[4721]: I1202 12:09:48.967242 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="911bdab8-6e80-4fb4-8cde-4e5702b37fa2" containerName="barbican-db-sync" Dec 02 12:09:48 crc kubenswrapper[4721]: I1202 12:09:48.967402 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="911bdab8-6e80-4fb4-8cde-4e5702b37fa2" containerName="barbican-db-sync" Dec 02 12:09:48 crc kubenswrapper[4721]: I1202 12:09:48.967820 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" Dec 02 12:09:48 crc kubenswrapper[4721]: I1202 12:09:48.969377 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-rgqvl" Dec 02 12:09:48 crc kubenswrapper[4721]: I1202 12:09:48.969872 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-service-cert" Dec 02 12:09:48 crc kubenswrapper[4721]: I1202 12:09:48.977757 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm"] Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.046083 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2tbv\" (UniqueName: \"kubernetes.io/projected/911bdab8-6e80-4fb4-8cde-4e5702b37fa2-kube-api-access-x2tbv\") pod \"911bdab8-6e80-4fb4-8cde-4e5702b37fa2\" (UID: \"911bdab8-6e80-4fb4-8cde-4e5702b37fa2\") " Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.046418 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/911bdab8-6e80-4fb4-8cde-4e5702b37fa2-db-sync-config-data\") pod \"911bdab8-6e80-4fb4-8cde-4e5702b37fa2\" (UID: \"911bdab8-6e80-4fb4-8cde-4e5702b37fa2\") " Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.051779 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/911bdab8-6e80-4fb4-8cde-4e5702b37fa2-kube-api-access-x2tbv" (OuterVolumeSpecName: "kube-api-access-x2tbv") pod "911bdab8-6e80-4fb4-8cde-4e5702b37fa2" (UID: "911bdab8-6e80-4fb4-8cde-4e5702b37fa2"). InnerVolumeSpecName "kube-api-access-x2tbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.053842 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/911bdab8-6e80-4fb4-8cde-4e5702b37fa2-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "911bdab8-6e80-4fb4-8cde-4e5702b37fa2" (UID: "911bdab8-6e80-4fb4-8cde-4e5702b37fa2"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.147594 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x89w9\" (UniqueName: \"kubernetes.io/projected/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-kube-api-access-x89w9\") pod \"swift-operator-controller-manager-8b6cbcf7c-qdvhm\" (UID: \"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c\") " pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.147856 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-apiservice-cert\") pod \"swift-operator-controller-manager-8b6cbcf7c-qdvhm\" (UID: \"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c\") " pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.147960 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-webhook-cert\") pod \"swift-operator-controller-manager-8b6cbcf7c-qdvhm\" (UID: \"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c\") " pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.148066 4721 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/911bdab8-6e80-4fb4-8cde-4e5702b37fa2-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.148137 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2tbv\" (UniqueName: \"kubernetes.io/projected/911bdab8-6e80-4fb4-8cde-4e5702b37fa2-kube-api-access-x2tbv\") on node \"crc\" DevicePath \"\"" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.250013 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x89w9\" (UniqueName: \"kubernetes.io/projected/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-kube-api-access-x89w9\") pod \"swift-operator-controller-manager-8b6cbcf7c-qdvhm\" (UID: \"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c\") " pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.250292 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-apiservice-cert\") pod \"swift-operator-controller-manager-8b6cbcf7c-qdvhm\" (UID: \"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c\") " pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.250420 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-webhook-cert\") pod \"swift-operator-controller-manager-8b6cbcf7c-qdvhm\" (UID: \"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c\") " pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.254044 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-webhook-cert\") pod \"swift-operator-controller-manager-8b6cbcf7c-qdvhm\" (UID: \"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c\") " pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.254133 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-apiservice-cert\") pod \"swift-operator-controller-manager-8b6cbcf7c-qdvhm\" (UID: \"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c\") " pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.277686 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x89w9\" (UniqueName: \"kubernetes.io/projected/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-kube-api-access-x89w9\") pod \"swift-operator-controller-manager-8b6cbcf7c-qdvhm\" (UID: \"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c\") " pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.286410 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.502753 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-db-sync-h58xl" event={"ID":"911bdab8-6e80-4fb4-8cde-4e5702b37fa2","Type":"ContainerDied","Data":"68ea833d23a3ba697f52b6bd236a65c3b8805e6c2bd556219d0751bed6d109f5"} Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.502790 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68ea833d23a3ba697f52b6bd236a65c3b8805e6c2bd556219d0751bed6d109f5" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.502841 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-db-sync-h58xl" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.717069 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm"] Dec 02 12:09:49 crc kubenswrapper[4721]: W1202 12:09:49.724485 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc05df77_4dd2_4d22_87e8_7a0e9ff5181c.slice/crio-db096804f2a36442bddeef9f7f17e762d03ac4ef1347f9301fe07d3e70eecf39 WatchSource:0}: Error finding container db096804f2a36442bddeef9f7f17e762d03ac4ef1347f9301fe07d3e70eecf39: Status 404 returned error can't find the container with id db096804f2a36442bddeef9f7f17e762d03ac4ef1347f9301fe07d3e70eecf39 Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.754964 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625"] Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.756400 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.766325 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-keystone-listener-config-data" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.766365 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-config-data" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.766926 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-barbican-dockercfg-b5x2g" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.776417 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625"] Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.817419 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-worker-6d756b6777-n645z"] Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.818490 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.823750 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-worker-config-data" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.838617 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-worker-6d756b6777-n645z"] Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.867536 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican-api-746f899bcb-ql4pn"] Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.874589 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca6ac23f-63bc-4601-abfd-d5a61402f51f-config-data\") pod \"barbican-keystone-listener-69fbfdbb48-s7625\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") " pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.874956 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca6ac23f-63bc-4601-abfd-d5a61402f51f-config-data-custom\") pod \"barbican-keystone-listener-69fbfdbb48-s7625\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") " pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.874997 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4ghc\" (UniqueName: \"kubernetes.io/projected/ca6ac23f-63bc-4601-abfd-d5a61402f51f-kube-api-access-f4ghc\") pod \"barbican-keystone-listener-69fbfdbb48-s7625\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") " pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.875217 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca6ac23f-63bc-4601-abfd-d5a61402f51f-logs\") pod \"barbican-keystone-listener-69fbfdbb48-s7625\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") " pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 
12:09:49.885093 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.886753 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-api-746f899bcb-ql4pn"] Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.887019 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"barbican-api-config-data" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.983547 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-logs\") pod \"barbican-worker-6d756b6777-n645z\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") " pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.983603 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-config-data-custom\") pod \"barbican-worker-6d756b6777-n645z\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") " pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.983632 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-config-data\") pod \"barbican-worker-6d756b6777-n645z\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") " pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.983653 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq86b\" (UniqueName: \"kubernetes.io/projected/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-kube-api-access-dq86b\") pod \"barbican-worker-6d756b6777-n645z\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") " pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.983720 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca6ac23f-63bc-4601-abfd-d5a61402f51f-config-data\") pod \"barbican-keystone-listener-69fbfdbb48-s7625\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") " pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.983750 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca6ac23f-63bc-4601-abfd-d5a61402f51f-config-data-custom\") pod \"barbican-keystone-listener-69fbfdbb48-s7625\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") " pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.983800 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4ghc\" (UniqueName: \"kubernetes.io/projected/ca6ac23f-63bc-4601-abfd-d5a61402f51f-kube-api-access-f4ghc\") pod \"barbican-keystone-listener-69fbfdbb48-s7625\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") " pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.983826 4721 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca6ac23f-63bc-4601-abfd-d5a61402f51f-logs\") pod \"barbican-keystone-listener-69fbfdbb48-s7625\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") " pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.984853 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca6ac23f-63bc-4601-abfd-d5a61402f51f-logs\") pod \"barbican-keystone-listener-69fbfdbb48-s7625\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") " pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.989860 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca6ac23f-63bc-4601-abfd-d5a61402f51f-config-data-custom\") pod \"barbican-keystone-listener-69fbfdbb48-s7625\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") " pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" Dec 02 12:09:49 crc kubenswrapper[4721]: I1202 12:09:49.996496 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca6ac23f-63bc-4601-abfd-d5a61402f51f-config-data\") pod \"barbican-keystone-listener-69fbfdbb48-s7625\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") " pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.001434 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4ghc\" (UniqueName: \"kubernetes.io/projected/ca6ac23f-63bc-4601-abfd-d5a61402f51f-kube-api-access-f4ghc\") pod \"barbican-keystone-listener-69fbfdbb48-s7625\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") " pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.075454 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.084778 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/04a81817-b7ad-473c-8a11-a79c9e61bcb7-config-data-custom\") pod \"barbican-api-746f899bcb-ql4pn\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.084857 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04a81817-b7ad-473c-8a11-a79c9e61bcb7-config-data\") pod \"barbican-api-746f899bcb-ql4pn\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.084951 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjghn\" (UniqueName: \"kubernetes.io/projected/04a81817-b7ad-473c-8a11-a79c9e61bcb7-kube-api-access-mjghn\") pod \"barbican-api-746f899bcb-ql4pn\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.085010 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04a81817-b7ad-473c-8a11-a79c9e61bcb7-logs\") pod \"barbican-api-746f899bcb-ql4pn\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.085147 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-logs\") pod \"barbican-worker-6d756b6777-n645z\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") " pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.085212 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-config-data-custom\") pod \"barbican-worker-6d756b6777-n645z\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") " pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.085252 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-config-data\") pod \"barbican-worker-6d756b6777-n645z\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") " pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.085271 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq86b\" (UniqueName: \"kubernetes.io/projected/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-kube-api-access-dq86b\") pod \"barbican-worker-6d756b6777-n645z\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") " pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.085964 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-logs\") pod \"barbican-worker-6d756b6777-n645z\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") " pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.089544 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-config-data-custom\") pod \"barbican-worker-6d756b6777-n645z\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") " pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.090046 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-config-data\") pod \"barbican-worker-6d756b6777-n645z\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") " pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.103021 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq86b\" (UniqueName: \"kubernetes.io/projected/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-kube-api-access-dq86b\") pod \"barbican-worker-6d756b6777-n645z\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") " pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.186700 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.187159 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/04a81817-b7ad-473c-8a11-a79c9e61bcb7-config-data-custom\") pod \"barbican-api-746f899bcb-ql4pn\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.187228 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04a81817-b7ad-473c-8a11-a79c9e61bcb7-config-data\") pod \"barbican-api-746f899bcb-ql4pn\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.187246 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjghn\" (UniqueName: \"kubernetes.io/projected/04a81817-b7ad-473c-8a11-a79c9e61bcb7-kube-api-access-mjghn\") pod \"barbican-api-746f899bcb-ql4pn\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.187265 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04a81817-b7ad-473c-8a11-a79c9e61bcb7-logs\") pod \"barbican-api-746f899bcb-ql4pn\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.187674 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04a81817-b7ad-473c-8a11-a79c9e61bcb7-logs\") pod \"barbican-api-746f899bcb-ql4pn\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " 
pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.195991 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04a81817-b7ad-473c-8a11-a79c9e61bcb7-config-data\") pod \"barbican-api-746f899bcb-ql4pn\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.196020 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/04a81817-b7ad-473c-8a11-a79c9e61bcb7-config-data-custom\") pod \"barbican-api-746f899bcb-ql4pn\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.214383 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjghn\" (UniqueName: \"kubernetes.io/projected/04a81817-b7ad-473c-8a11-a79c9e61bcb7-kube-api-access-mjghn\") pod \"barbican-api-746f899bcb-ql4pn\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.496874 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.518133 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" event={"ID":"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c","Type":"ContainerStarted","Data":"db096804f2a36442bddeef9f7f17e762d03ac4ef1347f9301fe07d3e70eecf39"} Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.534129 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625"] Dec 02 12:09:50 crc kubenswrapper[4721]: W1202 12:09:50.538130 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca6ac23f_63bc_4601_abfd_d5a61402f51f.slice/crio-a0deff91528ede51a37da0888bc6d3fe7c45e0f84c991b9a1835f4fabdd7190e WatchSource:0}: Error finding container a0deff91528ede51a37da0888bc6d3fe7c45e0f84c991b9a1835f4fabdd7190e: Status 404 returned error can't find the container with id a0deff91528ede51a37da0888bc6d3fe7c45e0f84c991b9a1835f4fabdd7190e Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.711178 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-worker-6d756b6777-n645z"] Dec 02 12:09:50 crc kubenswrapper[4721]: W1202 12:09:50.727509 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd8ddc2f8_ddc4_4730_9b03_f81a3eece4a5.slice/crio-1b18c094746ff3573472ba57776c5acd08ee68c465d4e1740cfd98d1b970c18f WatchSource:0}: Error finding container 1b18c094746ff3573472ba57776c5acd08ee68c465d4e1740cfd98d1b970c18f: Status 404 returned error can't find the container with id 1b18c094746ff3573472ba57776c5acd08ee68c465d4e1740cfd98d1b970c18f Dec 02 12:09:50 crc kubenswrapper[4721]: I1202 12:09:50.794922 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican-api-746f899bcb-ql4pn"] Dec 02 12:09:51 crc kubenswrapper[4721]: I1202 12:09:51.526666 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" event={"ID":"04a81817-b7ad-473c-8a11-a79c9e61bcb7","Type":"ContainerStarted","Data":"8b04ec68c42841a803c8710dcf5afc63ab39aca55e3ac7fc8ad62b0c00eb33de"} Dec 02 12:09:51 crc kubenswrapper[4721]: I1202 12:09:51.527084 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" event={"ID":"04a81817-b7ad-473c-8a11-a79c9e61bcb7","Type":"ContainerStarted","Data":"9ad16c65327cbc3cef461309012a20f8d647a6b8db3bfa04f117bb78da6dca5e"} Dec 02 12:09:51 crc kubenswrapper[4721]: I1202 12:09:51.529682 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" event={"ID":"ca6ac23f-63bc-4601-abfd-d5a61402f51f","Type":"ContainerStarted","Data":"a0deff91528ede51a37da0888bc6d3fe7c45e0f84c991b9a1835f4fabdd7190e"} Dec 02 12:09:51 crc kubenswrapper[4721]: I1202 12:09:51.531460 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" event={"ID":"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5","Type":"ContainerStarted","Data":"1b18c094746ff3573472ba57776c5acd08ee68c465d4e1740cfd98d1b970c18f"} Dec 02 12:09:52 crc kubenswrapper[4721]: I1202 12:09:52.545967 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" event={"ID":"04a81817-b7ad-473c-8a11-a79c9e61bcb7","Type":"ContainerStarted","Data":"749a001f205f44785b1aa789ce5ac84e7b7522c904eb4118466d2a1e1f306c0c"} Dec 02 12:09:52 crc kubenswrapper[4721]: I1202 12:09:52.546528 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:52 crc kubenswrapper[4721]: I1202 12:09:52.546541 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:52 crc kubenswrapper[4721]: I1202 12:09:52.562580 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" event={"ID":"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c","Type":"ContainerStarted","Data":"35977b5d3981a8b0aa61a0e9c1cf728e40d4ded2985eecd9d6570d56959ee1b6"} Dec 02 12:09:52 crc kubenswrapper[4721]: I1202 12:09:52.563352 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" Dec 02 12:09:52 crc kubenswrapper[4721]: I1202 12:09:52.568901 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" podStartSLOduration=3.568887737 podStartE2EDuration="3.568887737s" podCreationTimestamp="2025-12-02 12:09:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:09:52.567284687 +0000 UTC m=+1122.305835036" watchObservedRunningTime="2025-12-02 12:09:52.568887737 +0000 UTC m=+1122.307438086" Dec 02 12:09:52 crc kubenswrapper[4721]: I1202 12:09:52.593812 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" podStartSLOduration=2.148104766 podStartE2EDuration="4.593794963s" podCreationTimestamp="2025-12-02 12:09:48 +0000 UTC" firstStartedPulling="2025-12-02 12:09:49.72616297 +0000 UTC m=+1119.464713319" lastFinishedPulling="2025-12-02 12:09:52.171853167 +0000 UTC m=+1121.910403516" observedRunningTime="2025-12-02 
12:09:52.587243001 +0000 UTC m=+1122.325793340" watchObservedRunningTime="2025-12-02 12:09:52.593794963 +0000 UTC m=+1122.332345312" Dec 02 12:09:53 crc kubenswrapper[4721]: I1202 12:09:53.569932 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" event={"ID":"ca6ac23f-63bc-4601-abfd-d5a61402f51f","Type":"ContainerStarted","Data":"387d354c93317bbbf2bf2aeaf2d880a6c8ec047947c845b69c5d348daef62faf"} Dec 02 12:09:53 crc kubenswrapper[4721]: I1202 12:09:53.570298 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" event={"ID":"ca6ac23f-63bc-4601-abfd-d5a61402f51f","Type":"ContainerStarted","Data":"064adc3fb0a1656c8f95b1deafca3f5ead1d7812b36af11130552138add457fb"} Dec 02 12:09:53 crc kubenswrapper[4721]: I1202 12:09:53.574863 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" event={"ID":"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5","Type":"ContainerStarted","Data":"2a434ede44dd08007f5fdd84c8c299dea5d11d6a5e16cacc130ea92cfb2ed432"} Dec 02 12:09:53 crc kubenswrapper[4721]: I1202 12:09:53.574905 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" event={"ID":"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5","Type":"ContainerStarted","Data":"e7754533dd8cefe98d4994cc7e37962d8eaa78a097f114de5bc05d6950b71dd7"} Dec 02 12:09:53 crc kubenswrapper[4721]: I1202 12:09:53.590297 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" podStartSLOduration=2.022288988 podStartE2EDuration="4.59028138s" podCreationTimestamp="2025-12-02 12:09:49 +0000 UTC" firstStartedPulling="2025-12-02 12:09:50.540246423 +0000 UTC m=+1120.278796762" lastFinishedPulling="2025-12-02 12:09:53.108238805 +0000 UTC m=+1122.846789154" observedRunningTime="2025-12-02 12:09:53.586003018 +0000 UTC m=+1123.324553367" watchObservedRunningTime="2025-12-02 12:09:53.59028138 +0000 UTC m=+1123.328831729" Dec 02 12:09:53 crc kubenswrapper[4721]: I1202 12:09:53.614362 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" podStartSLOduration=2.2375809540000002 podStartE2EDuration="4.61432577s" podCreationTimestamp="2025-12-02 12:09:49 +0000 UTC" firstStartedPulling="2025-12-02 12:09:50.730173658 +0000 UTC m=+1120.468724007" lastFinishedPulling="2025-12-02 12:09:53.106918474 +0000 UTC m=+1122.845468823" observedRunningTime="2025-12-02 12:09:53.610013758 +0000 UTC m=+1123.348564107" watchObservedRunningTime="2025-12-02 12:09:53.61432577 +0000 UTC m=+1123.352876119" Dec 02 12:09:56 crc kubenswrapper[4721]: I1202 12:09:56.934916 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:09:59 crc kubenswrapper[4721]: I1202 12:09:59.292870 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" Dec 02 12:10:01 crc kubenswrapper[4721]: I1202 12:10:01.969889 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.136752 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 
12:10:05.140889 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.142733 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-files" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.142749 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-conf" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.142911 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-swift-dockercfg-fk2ls" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.142877 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-storage-config-data" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.168548 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.201851 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/27eb4674-477b-4189-857a-d5a572e89277-lock\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.201982 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/27eb4674-477b-4189-857a-d5a572e89277-cache\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.202014 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqnst\" (UniqueName: \"kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-kube-api-access-wqnst\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.202038 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.202056 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.303575 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqnst\" (UniqueName: \"kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-kube-api-access-wqnst\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.303643 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: 
\"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.303666 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.303753 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/27eb4674-477b-4189-857a-d5a572e89277-lock\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.303822 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/27eb4674-477b-4189-857a-d5a572e89277-cache\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: E1202 12:10:05.303940 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:10:05 crc kubenswrapper[4721]: E1202 12:10:05.303975 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.304014 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") device mount path \"/mnt/openstack/pv10\"" pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: E1202 12:10:05.304102 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift podName:27eb4674-477b-4189-857a-d5a572e89277 nodeName:}" failed. No retries permitted until 2025-12-02 12:10:05.804069399 +0000 UTC m=+1135.542619848 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift") pod "swift-storage-0" (UID: "27eb4674-477b-4189-857a-d5a572e89277") : configmap "swift-ring-files" not found Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.304301 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/27eb4674-477b-4189-857a-d5a572e89277-cache\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.304407 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/27eb4674-477b-4189-857a-d5a572e89277-lock\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.324555 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqnst\" (UniqueName: \"kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-kube-api-access-wqnst\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.328496 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.474280 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z"] Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.475285 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.476893 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-proxy-config-data" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.490735 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z"] Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.607079 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-config-data\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.607127 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.607156 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nnzx\" (UniqueName: \"kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-kube-api-access-5nnzx\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.607235 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-run-httpd\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.607312 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-log-httpd\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.665300 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-rr4ss"] Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.668756 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.673981 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.674771 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-rr4ss"] Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.675784 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.708998 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-log-httpd\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.709067 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-config-data\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.709096 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.709135 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr6c6\" (UniqueName: \"kubernetes.io/projected/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-kube-api-access-sr6c6\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss" Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.709162 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nnzx\" (UniqueName: \"kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-kube-api-access-5nnzx\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:05 crc kubenswrapper[4721]: E1202 12:10:05.709280 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:10:05 crc kubenswrapper[4721]: E1202 12:10:05.709324 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z: configmap "swift-ring-files" not found Dec 02 12:10:05 crc kubenswrapper[4721]: E1202 12:10:05.709398 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift podName:e1498f09-7529-4b3e-b2b8-a11b5a5a4624 nodeName:}" failed. No retries permitted until 2025-12-02 12:10:06.209377902 +0000 UTC m=+1135.947928341 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift") pod "swift-proxy-8b5dc4bcf-r7b9z" (UID: "e1498f09-7529-4b3e-b2b8-a11b5a5a4624") : configmap "swift-ring-files" not found
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.709291 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-swiftconf\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.709485 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-scripts\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.709519 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-ring-data-devices\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.709588 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-log-httpd\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.709604 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-run-httpd\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.709750 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-etc-swift\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.709804 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-dispersionconf\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.709923 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-run-httpd\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.714804 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-config-data\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.725366 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nnzx\" (UniqueName: \"kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-kube-api-access-5nnzx\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.810861 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.810958 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr6c6\" (UniqueName: \"kubernetes.io/projected/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-kube-api-access-sr6c6\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.810995 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-swiftconf\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.811026 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-scripts\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.811067 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-ring-data-devices\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.811124 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-etc-swift\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.811149 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-dispersionconf\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: E1202 12:10:05.811516 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Dec 02 12:10:05 crc kubenswrapper[4721]: E1202 12:10:05.811610 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found
Dec 02 12:10:05 crc kubenswrapper[4721]: E1202 12:10:05.811716 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift podName:27eb4674-477b-4189-857a-d5a572e89277 nodeName:}" failed. No retries permitted until 2025-12-02 12:10:06.811696481 +0000 UTC m=+1136.550246830 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift") pod "swift-storage-0" (UID: "27eb4674-477b-4189-857a-d5a572e89277") : configmap "swift-ring-files" not found
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.811829 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-etc-swift\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.812465 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-scripts\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.812792 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-ring-data-devices\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.814896 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-swiftconf\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.816157 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-dispersionconf\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.832193 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr6c6\" (UniqueName: \"kubernetes.io/projected/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-kube-api-access-sr6c6\") pod \"swift-ring-rebalance-rr4ss\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
Dec 02 12:10:05 crc kubenswrapper[4721]: I1202 12:10:05.983079 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss"
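The "configmap \"swift-ring-files\" not found" failures above all trace to one construct: the etc-swift volume on swift-proxy-8b5dc4bcf-r7b9z and swift-storage-0 is a projected volume sourced from that ConfigMap, and a projected ConfigMap source that is not marked optional fails SetUp until the ConfigMap exists. The sketch below reconstructs the likely shape of that volume from the UniqueName strings in the log; it is an illustration, not the swift-operator's actual manifest, and only the names "etc-swift" and "swift-ring-files" are taken from the log.

    package example

    import corev1 "k8s.io/api/core/v1"

    // etcSwiftVolume is a hedged reconstruction of the volume the log shows
    // failing to mount. With Optional left unset (false), kubelet keeps
    // retrying MountVolume.SetUp until "swift-ring-files" appears, which is
    // exactly the retry loop visible in the entries above and below.
    func etcSwiftVolume() corev1.Volume {
        return corev1.Volume{
            Name: "etc-swift",
            VolumeSource: corev1.VolumeSource{
                Projected: &corev1.ProjectedVolumeSource{
                    Sources: []corev1.VolumeProjection{{
                        ConfigMap: &corev1.ConfigMapProjection{
                            LocalObjectReference: corev1.LocalObjectReference{
                                Name: "swift-ring-files",
                            },
                        },
                    }},
                },
            },
        }
    }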
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss" Dec 02 12:10:06 crc kubenswrapper[4721]: I1202 12:10:06.217450 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:06 crc kubenswrapper[4721]: E1202 12:10:06.217606 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:10:06 crc kubenswrapper[4721]: E1202 12:10:06.217840 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z: configmap "swift-ring-files" not found Dec 02 12:10:06 crc kubenswrapper[4721]: E1202 12:10:06.217919 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift podName:e1498f09-7529-4b3e-b2b8-a11b5a5a4624 nodeName:}" failed. No retries permitted until 2025-12-02 12:10:07.217897902 +0000 UTC m=+1136.956448261 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift") pod "swift-proxy-8b5dc4bcf-r7b9z" (UID: "e1498f09-7529-4b3e-b2b8-a11b5a5a4624") : configmap "swift-ring-files" not found Dec 02 12:10:06 crc kubenswrapper[4721]: I1202 12:10:06.441944 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-rr4ss"] Dec 02 12:10:06 crc kubenswrapper[4721]: W1202 12:10:06.452068 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e9d42f0_8584_44ca_aafe_e72b7bef9dd0.slice/crio-e6fc98bce9e6b4d7484bd0aa481f4c0ac0aaab470c8d82508bd098c7e99e2e2c WatchSource:0}: Error finding container e6fc98bce9e6b4d7484bd0aa481f4c0ac0aaab470c8d82508bd098c7e99e2e2c: Status 404 returned error can't find the container with id e6fc98bce9e6b4d7484bd0aa481f4c0ac0aaab470c8d82508bd098c7e99e2e2c Dec 02 12:10:06 crc kubenswrapper[4721]: I1202 12:10:06.681277 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss" event={"ID":"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0","Type":"ContainerStarted","Data":"e6fc98bce9e6b4d7484bd0aa481f4c0ac0aaab470c8d82508bd098c7e99e2e2c"} Dec 02 12:10:06 crc kubenswrapper[4721]: I1202 12:10:06.825192 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:06 crc kubenswrapper[4721]: E1202 12:10:06.825441 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:10:06 crc kubenswrapper[4721]: E1202 12:10:06.825477 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:10:06 crc kubenswrapper[4721]: E1202 12:10:06.825545 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift podName:27eb4674-477b-4189-857a-d5a572e89277 
nodeName:}" failed. No retries permitted until 2025-12-02 12:10:08.825524652 +0000 UTC m=+1138.564075091 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift") pod "swift-storage-0" (UID: "27eb4674-477b-4189-857a-d5a572e89277") : configmap "swift-ring-files" not found Dec 02 12:10:07 crc kubenswrapper[4721]: I1202 12:10:07.231962 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:07 crc kubenswrapper[4721]: E1202 12:10:07.232132 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:10:07 crc kubenswrapper[4721]: E1202 12:10:07.232165 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z: configmap "swift-ring-files" not found Dec 02 12:10:07 crc kubenswrapper[4721]: E1202 12:10:07.232247 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift podName:e1498f09-7529-4b3e-b2b8-a11b5a5a4624 nodeName:}" failed. No retries permitted until 2025-12-02 12:10:09.232222009 +0000 UTC m=+1138.970772398 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift") pod "swift-proxy-8b5dc4bcf-r7b9z" (UID: "e1498f09-7529-4b3e-b2b8-a11b5a5a4624") : configmap "swift-ring-files" not found Dec 02 12:10:08 crc kubenswrapper[4721]: I1202 12:10:08.856783 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:08 crc kubenswrapper[4721]: E1202 12:10:08.856980 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:10:08 crc kubenswrapper[4721]: E1202 12:10:08.857174 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:10:08 crc kubenswrapper[4721]: E1202 12:10:08.857225 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift podName:27eb4674-477b-4189-857a-d5a572e89277 nodeName:}" failed. No retries permitted until 2025-12-02 12:10:12.857210419 +0000 UTC m=+1142.595760768 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift") pod "swift-storage-0" (UID: "27eb4674-477b-4189-857a-d5a572e89277") : configmap "swift-ring-files" not found Dec 02 12:10:09 crc kubenswrapper[4721]: I1202 12:10:09.269608 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:09 crc kubenswrapper[4721]: E1202 12:10:09.269885 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:10:09 crc kubenswrapper[4721]: E1202 12:10:09.269908 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z: configmap "swift-ring-files" not found Dec 02 12:10:09 crc kubenswrapper[4721]: E1202 12:10:09.269955 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift podName:e1498f09-7529-4b3e-b2b8-a11b5a5a4624 nodeName:}" failed. No retries permitted until 2025-12-02 12:10:13.269938791 +0000 UTC m=+1143.008489140 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift") pod "swift-proxy-8b5dc4bcf-r7b9z" (UID: "e1498f09-7529-4b3e-b2b8-a11b5a5a4624") : configmap "swift-ring-files" not found Dec 02 12:10:11 crc kubenswrapper[4721]: I1202 12:10:11.730393 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss" event={"ID":"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0","Type":"ContainerStarted","Data":"593e4196f57767a499b514086441727d87211b11c9c65064e51d7ed545e8e456"} Dec 02 12:10:11 crc kubenswrapper[4721]: I1202 12:10:11.758398 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss" podStartSLOduration=2.558135869 podStartE2EDuration="6.758368484s" podCreationTimestamp="2025-12-02 12:10:05 +0000 UTC" firstStartedPulling="2025-12-02 12:10:06.454196365 +0000 UTC m=+1136.192746714" lastFinishedPulling="2025-12-02 12:10:10.65442898 +0000 UTC m=+1140.392979329" observedRunningTime="2025-12-02 12:10:11.751857594 +0000 UTC m=+1141.490407953" watchObservedRunningTime="2025-12-02 12:10:11.758368484 +0000 UTC m=+1141.496918873" Dec 02 12:10:12 crc kubenswrapper[4721]: I1202 12:10:12.924541 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:12 crc kubenswrapper[4721]: E1202 12:10:12.925739 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:10:12 crc kubenswrapper[4721]: E1202 12:10:12.925758 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:10:12 crc kubenswrapper[4721]: E1202 12:10:12.925802 4721 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift podName:27eb4674-477b-4189-857a-d5a572e89277 nodeName:}" failed. No retries permitted until 2025-12-02 12:10:20.925785131 +0000 UTC m=+1150.664335490 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift") pod "swift-storage-0" (UID: "27eb4674-477b-4189-857a-d5a572e89277") : configmap "swift-ring-files" not found Dec 02 12:10:13 crc kubenswrapper[4721]: I1202 12:10:13.329279 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:13 crc kubenswrapper[4721]: E1202 12:10:13.329850 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:10:13 crc kubenswrapper[4721]: E1202 12:10:13.329868 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z: configmap "swift-ring-files" not found Dec 02 12:10:13 crc kubenswrapper[4721]: E1202 12:10:13.329920 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift podName:e1498f09-7529-4b3e-b2b8-a11b5a5a4624 nodeName:}" failed. No retries permitted until 2025-12-02 12:10:21.329901589 +0000 UTC m=+1151.068451948 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift") pod "swift-proxy-8b5dc4bcf-r7b9z" (UID: "e1498f09-7529-4b3e-b2b8-a11b5a5a4624") : configmap "swift-ring-files" not found Dec 02 12:10:18 crc kubenswrapper[4721]: I1202 12:10:18.786736 4721 generic.go:334] "Generic (PLEG): container finished" podID="3e9d42f0-8584-44ca-aafe-e72b7bef9dd0" containerID="593e4196f57767a499b514086441727d87211b11c9c65064e51d7ed545e8e456" exitCode=0 Dec 02 12:10:18 crc kubenswrapper[4721]: I1202 12:10:18.786790 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss" event={"ID":"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0","Type":"ContainerDied","Data":"593e4196f57767a499b514086441727d87211b11c9c65064e51d7ed545e8e456"} Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.070021 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss" Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.232159 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sr6c6\" (UniqueName: \"kubernetes.io/projected/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-kube-api-access-sr6c6\") pod \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.232322 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-scripts\") pod \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.232406 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-dispersionconf\") pod \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.232594 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-swiftconf\") pod \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.232727 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-etc-swift\") pod \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.233460 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-ring-data-devices\") pod \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.233966 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "3e9d42f0-8584-44ca-aafe-e72b7bef9dd0" (UID: "3e9d42f0-8584-44ca-aafe-e72b7bef9dd0"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.234462 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "3e9d42f0-8584-44ca-aafe-e72b7bef9dd0" (UID: "3e9d42f0-8584-44ca-aafe-e72b7bef9dd0"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.239174 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-kube-api-access-sr6c6" (OuterVolumeSpecName: "kube-api-access-sr6c6") pod "3e9d42f0-8584-44ca-aafe-e72b7bef9dd0" (UID: "3e9d42f0-8584-44ca-aafe-e72b7bef9dd0"). InnerVolumeSpecName "kube-api-access-sr6c6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:10:20 crc kubenswrapper[4721]: E1202 12:10:20.252492 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-scripts podName:3e9d42f0-8584-44ca-aafe-e72b7bef9dd0 nodeName:}" failed. No retries permitted until 2025-12-02 12:10:20.752396292 +0000 UTC m=+1150.490946681 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "scripts" (UniqueName: "kubernetes.io/configmap/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-scripts") pod "3e9d42f0-8584-44ca-aafe-e72b7bef9dd0" (UID: "3e9d42f0-8584-44ca-aafe-e72b7bef9dd0") : error deleting /var/lib/kubelet/pods/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0/volume-subpaths: remove /var/lib/kubelet/pods/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0/volume-subpaths: no such file or directory Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.254195 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "3e9d42f0-8584-44ca-aafe-e72b7bef9dd0" (UID: "3e9d42f0-8584-44ca-aafe-e72b7bef9dd0"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.254619 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "3e9d42f0-8584-44ca-aafe-e72b7bef9dd0" (UID: "3e9d42f0-8584-44ca-aafe-e72b7bef9dd0"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.334855 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sr6c6\" (UniqueName: \"kubernetes.io/projected/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-kube-api-access-sr6c6\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.334915 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.334942 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.334967 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.334989 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.837170 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss" event={"ID":"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0","Type":"ContainerDied","Data":"e6fc98bce9e6b4d7484bd0aa481f4c0ac0aaab470c8d82508bd098c7e99e2e2c"} Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.837456 4721 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="e6fc98bce9e6b4d7484bd0aa481f4c0ac0aaab470c8d82508bd098c7e99e2e2c" Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.837283 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-rr4ss" Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.841040 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-scripts\") pod \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\" (UID: \"3e9d42f0-8584-44ca-aafe-e72b7bef9dd0\") " Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.841554 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-scripts" (OuterVolumeSpecName: "scripts") pod "3e9d42f0-8584-44ca-aafe-e72b7bef9dd0" (UID: "3e9d42f0-8584-44ca-aafe-e72b7bef9dd0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.943092 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.943436 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:20 crc kubenswrapper[4721]: I1202 12:10:20.947780 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift\") pod \"swift-storage-0\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:21 crc kubenswrapper[4721]: I1202 12:10:21.061032 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-rr4ss_3e9d42f0-8584-44ca-aafe-e72b7bef9dd0/swift-ring-rebalance/0.log" Dec 02 12:10:21 crc kubenswrapper[4721]: I1202 12:10:21.065540 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:10:21 crc kubenswrapper[4721]: I1202 12:10:21.350161 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:21 crc kubenswrapper[4721]: I1202 12:10:21.354947 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift\") pod \"swift-proxy-8b5dc4bcf-r7b9z\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:21 crc kubenswrapper[4721]: I1202 12:10:21.428693 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:21 crc kubenswrapper[4721]: I1202 12:10:21.500729 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:10:21 crc kubenswrapper[4721]: I1202 12:10:21.845442 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"d641dfcd596b13facaa863878e3a218dc353a29484572bc490d78fd6f022252b"} Dec 02 12:10:21 crc kubenswrapper[4721]: I1202 12:10:21.847762 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z"] Dec 02 12:10:21 crc kubenswrapper[4721]: W1202 12:10:21.851485 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1498f09_7529_4b3e_b2b8_a11b5a5a4624.slice/crio-c5f17305cb463f56aafb997e5f36496b998542fca4261290d938a41420e547fb WatchSource:0}: Error finding container c5f17305cb463f56aafb997e5f36496b998542fca4261290d938a41420e547fb: Status 404 returned error can't find the container with id c5f17305cb463f56aafb997e5f36496b998542fca4261290d938a41420e547fb Dec 02 12:10:22 crc kubenswrapper[4721]: I1202 12:10:22.634113 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-rr4ss_3e9d42f0-8584-44ca-aafe-e72b7bef9dd0/swift-ring-rebalance/0.log" Dec 02 12:10:22 crc kubenswrapper[4721]: I1202 12:10:22.880125 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" event={"ID":"e1498f09-7529-4b3e-b2b8-a11b5a5a4624","Type":"ContainerStarted","Data":"47b26f0b8b19e903cbad514410be260867bb04d8bb7a11b867cec9ac05438844"} Dec 02 12:10:22 crc kubenswrapper[4721]: I1202 12:10:22.880207 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" event={"ID":"e1498f09-7529-4b3e-b2b8-a11b5a5a4624","Type":"ContainerStarted","Data":"2a29f9fe2c6a43104ceaa969dc2f696866343069ca88299ea997c871d7ee689e"} Dec 02 12:10:22 crc kubenswrapper[4721]: I1202 12:10:22.880224 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" event={"ID":"e1498f09-7529-4b3e-b2b8-a11b5a5a4624","Type":"ContainerStarted","Data":"c5f17305cb463f56aafb997e5f36496b998542fca4261290d938a41420e547fb"} Dec 02 12:10:22 crc kubenswrapper[4721]: I1202 12:10:22.880488 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:23 crc kubenswrapper[4721]: I1202 12:10:23.895000 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"099a21d19c8049b4e27934f6185161bf78b3d33f6e2df9eeb2d31c3c23a8f3fc"} Dec 02 12:10:23 crc kubenswrapper[4721]: I1202 12:10:23.895391 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"9e68c8145c46c51de310d82790fa92bfbcf3508bfe0b206a0a0f365566661343"} Dec 02 12:10:23 crc kubenswrapper[4721]: I1202 12:10:23.895411 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:23 crc kubenswrapper[4721]: I1202 12:10:23.895426 4721 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"f5a932dca5f2810a25a775136f518bba1a4e05d017c2f1ed37482e545105d6dd"} Dec 02 12:10:23 crc kubenswrapper[4721]: I1202 12:10:23.895437 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"823ead0e1dba1a6808b1545c985f36a5a06fbfff4dd41b9505ef04739ce6c35b"} Dec 02 12:10:24 crc kubenswrapper[4721]: I1202 12:10:24.242621 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-rr4ss_3e9d42f0-8584-44ca-aafe-e72b7bef9dd0/swift-ring-rebalance/0.log" Dec 02 12:10:24 crc kubenswrapper[4721]: I1202 12:10:24.926798 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"a92cbb3c16ac4018f46d76bdd233a690ebdb976f966bacbd6f10368289751f89"} Dec 02 12:10:25 crc kubenswrapper[4721]: I1202 12:10:25.883210 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-rr4ss_3e9d42f0-8584-44ca-aafe-e72b7bef9dd0/swift-ring-rebalance/0.log" Dec 02 12:10:25 crc kubenswrapper[4721]: I1202 12:10:25.941442 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"65799d55d1b5c936b5df5cb2f6d3f6736f2488ece2c9a056c8dbfaa7346f84d6"} Dec 02 12:10:25 crc kubenswrapper[4721]: I1202 12:10:25.941522 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"b583c65dcf1e1667c262d27f780ae405536c139dc4ae684d0b25d3ecfaeedeb2"} Dec 02 12:10:25 crc kubenswrapper[4721]: I1202 12:10:25.941544 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"f176422a2d531c26166f96b401d351a6fe34b52eb8bece2163366d4beaefbff4"} Dec 02 12:10:26 crc kubenswrapper[4721]: I1202 12:10:26.956127 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"5467bc17f876398c85911f415554a36185c7dd84bbff64110bf3c279c8483200"} Dec 02 12:10:26 crc kubenswrapper[4721]: I1202 12:10:26.956427 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"cd57944d6e44a8ede134d360793593fc774b90b4826e4d2196762778b5fc087c"} Dec 02 12:10:27 crc kubenswrapper[4721]: I1202 12:10:27.523964 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-rr4ss_3e9d42f0-8584-44ca-aafe-e72b7bef9dd0/swift-ring-rebalance/0.log" Dec 02 12:10:27 crc kubenswrapper[4721]: I1202 12:10:27.970275 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"6fd69500bf05386473742672cfdfabd0ba5e53c6ea2849a034a010998107fd62"} Dec 02 12:10:27 crc kubenswrapper[4721]: I1202 12:10:27.970313 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"1e07972f5499cd9722b51c64d003837027ad296aac2851843b87f31515436f89"} Dec 02 12:10:27 crc kubenswrapper[4721]: I1202 12:10:27.970324 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"1a58821aa2d18c78cf9ee3ebed4af2694174839aabf139e0e6f945a3a54dd270"} Dec 02 12:10:27 crc kubenswrapper[4721]: I1202 12:10:27.970332 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"e5aa55bbc88958f81c0074e5ce8a912e3147aa864c5b2b850db9efb3ab41ddd6"} Dec 02 12:10:27 crc kubenswrapper[4721]: I1202 12:10:27.970359 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerStarted","Data":"270c4fb66daff1d8d57a60b18580156fc9694244d1f0de1bec2c8ac2f36f9907"} Dec 02 12:10:28 crc kubenswrapper[4721]: I1202 12:10:28.014398 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" podStartSLOduration=23.014372712 podStartE2EDuration="23.014372712s" podCreationTimestamp="2025-12-02 12:10:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:10:22.905299417 +0000 UTC m=+1152.643849766" watchObservedRunningTime="2025-12-02 12:10:28.014372712 +0000 UTC m=+1157.752923081" Dec 02 12:10:28 crc kubenswrapper[4721]: I1202 12:10:28.018938 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-0" podStartSLOduration=18.940299113000002 podStartE2EDuration="24.018919821s" podCreationTimestamp="2025-12-02 12:10:04 +0000 UTC" firstStartedPulling="2025-12-02 12:10:21.514359079 +0000 UTC m=+1151.252909428" lastFinishedPulling="2025-12-02 12:10:26.592979767 +0000 UTC m=+1156.331530136" observedRunningTime="2025-12-02 12:10:28.004323962 +0000 UTC m=+1157.742874321" watchObservedRunningTime="2025-12-02 12:10:28.018919821 +0000 UTC m=+1157.757470190" Dec 02 12:10:29 crc kubenswrapper[4721]: I1202 12:10:29.149155 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-rr4ss_3e9d42f0-8584-44ca-aafe-e72b7bef9dd0/swift-ring-rebalance/0.log" Dec 02 12:10:30 crc kubenswrapper[4721]: I1202 12:10:30.715869 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-rr4ss_3e9d42f0-8584-44ca-aafe-e72b7bef9dd0/swift-ring-rebalance/0.log" Dec 02 12:10:31 crc kubenswrapper[4721]: I1202 12:10:31.431302 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:31 crc kubenswrapper[4721]: I1202 12:10:31.433468 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:32 crc kubenswrapper[4721]: I1202 12:10:32.300603 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-rr4ss_3e9d42f0-8584-44ca-aafe-e72b7bef9dd0/swift-ring-rebalance/0.log" Dec 02 12:10:33 crc kubenswrapper[4721]: I1202 12:10:33.924997 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/swift-kuttl-tests_swift-ring-rebalance-rr4ss_3e9d42f0-8584-44ca-aafe-e72b7bef9dd0/swift-ring-rebalance/0.log" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.388028 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 02 12:10:35 crc kubenswrapper[4721]: E1202 12:10:35.388399 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e9d42f0-8584-44ca-aafe-e72b7bef9dd0" containerName="swift-ring-rebalance" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.388418 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e9d42f0-8584-44ca-aafe-e72b7bef9dd0" containerName="swift-ring-rebalance" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.388631 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e9d42f0-8584-44ca-aafe-e72b7bef9dd0" containerName="swift-ring-rebalance" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.396041 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.405486 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.411019 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.416982 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.424801 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.506134 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-rr4ss"] Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.514555 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-rr4ss"] Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.522134 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-p5vcf"] Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.523192 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.525454 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.525460 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.531310 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-p5vcf"] Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.573667 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/09e554d6-e3a5-437e-a153-3ed91fadb78d-lock\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.573704 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-etc-swift\") pod \"swift-storage-1\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.573724 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-lock\") pod \"swift-storage-1\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.573755 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-cache\") pod \"swift-storage-1\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.573800 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.573829 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09e554d6-e3a5-437e-a153-3ed91fadb78d-etc-swift\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.573862 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldgqb\" (UniqueName: \"kubernetes.io/projected/09e554d6-e3a5-437e-a153-3ed91fadb78d-kube-api-access-ldgqb\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.573880 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrscg\" (UniqueName: \"kubernetes.io/projected/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-kube-api-access-rrscg\") pod \"swift-storage-1\" 
(UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.573898 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/09e554d6-e3a5-437e-a153-3ed91fadb78d-cache\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.573922 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-1\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675215 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/09e554d6-e3a5-437e-a153-3ed91fadb78d-lock\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675511 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-etc-swift\") pod \"swift-storage-1\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675537 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-etc-swift\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675557 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-ring-data-devices\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675575 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-lock\") pod \"swift-storage-1\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675598 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-dispersionconf\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675623 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-cache\") pod \"swift-storage-1\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675651 4721 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675672 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s94b5\" (UniqueName: \"kubernetes.io/projected/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-kube-api-access-s94b5\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675697 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-scripts\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675713 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09e554d6-e3a5-437e-a153-3ed91fadb78d-etc-swift\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675744 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldgqb\" (UniqueName: \"kubernetes.io/projected/09e554d6-e3a5-437e-a153-3ed91fadb78d-kube-api-access-ldgqb\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675761 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrscg\" (UniqueName: \"kubernetes.io/projected/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-kube-api-access-rrscg\") pod \"swift-storage-1\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675781 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/09e554d6-e3a5-437e-a153-3ed91fadb78d-cache\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675795 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-1\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675812 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-swiftconf\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.675884 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: 
\"kubernetes.io/empty-dir/09e554d6-e3a5-437e-a153-3ed91fadb78d-lock\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.676203 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-lock\") pod \"swift-storage-1\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.676224 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-cache\") pod \"swift-storage-1\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.676499 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") device mount path \"/mnt/openstack/pv01\"" pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.676677 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-1\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") device mount path \"/mnt/openstack/pv11\"" pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.676902 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/09e554d6-e3a5-437e-a153-3ed91fadb78d-cache\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.682713 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09e554d6-e3a5-437e-a153-3ed91fadb78d-etc-swift\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.686229 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-etc-swift\") pod \"swift-storage-1\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.696273 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldgqb\" (UniqueName: \"kubernetes.io/projected/09e554d6-e3a5-437e-a153-3ed91fadb78d-kube-api-access-ldgqb\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.698236 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-2\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.700180 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-1\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.707414 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrscg\" (UniqueName: \"kubernetes.io/projected/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-kube-api-access-rrscg\") pod \"swift-storage-1\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.737637 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.745256 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.777388 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-etc-swift\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.776768 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-etc-swift\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.778062 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-ring-data-devices\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.778249 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-dispersionconf\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.778484 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s94b5\" (UniqueName: \"kubernetes.io/projected/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-kube-api-access-s94b5\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.778672 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-scripts\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.779088 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-swiftconf\") pod \"swift-ring-rebalance-p5vcf\" 
(UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.779505 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-scripts\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.778816 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-ring-data-devices\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.782211 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-dispersionconf\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.784232 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-swiftconf\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.794924 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s94b5\" (UniqueName: \"kubernetes.io/projected/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-kube-api-access-s94b5\") pod \"swift-ring-rebalance-p5vcf\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:35 crc kubenswrapper[4721]: I1202 12:10:35.840006 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:36 crc kubenswrapper[4721]: I1202 12:10:36.209591 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 02 12:10:36 crc kubenswrapper[4721]: W1202 12:10:36.210380 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09e554d6_e3a5_437e_a153_3ed91fadb78d.slice/crio-03e2c64a72b3eb763aaf870d3ec53052db9bea2a45adc28cd17ab12c86a882e0 WatchSource:0}: Error finding container 03e2c64a72b3eb763aaf870d3ec53052db9bea2a45adc28cd17ab12c86a882e0: Status 404 returned error can't find the container with id 03e2c64a72b3eb763aaf870d3ec53052db9bea2a45adc28cd17ab12c86a882e0 Dec 02 12:10:36 crc kubenswrapper[4721]: I1202 12:10:36.274585 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Dec 02 12:10:36 crc kubenswrapper[4721]: I1202 12:10:36.309202 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-p5vcf"] Dec 02 12:10:36 crc kubenswrapper[4721]: W1202 12:10:36.320014 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b1bfb1c_aa60_4f1c_a1e9_007c2118d139.slice/crio-eb513383c35f93985f79c8e28da26d8bb62f8df1562a6399d96a36d86b5de49f WatchSource:0}: Error finding container eb513383c35f93985f79c8e28da26d8bb62f8df1562a6399d96a36d86b5de49f: Status 404 returned error can't find the container with id eb513383c35f93985f79c8e28da26d8bb62f8df1562a6399d96a36d86b5de49f Dec 02 12:10:36 crc kubenswrapper[4721]: I1202 12:10:36.861753 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e9d42f0-8584-44ca-aafe-e72b7bef9dd0" path="/var/lib/kubelet/pods/3e9d42f0-8584-44ca-aafe-e72b7bef9dd0/volumes" Dec 02 12:10:37 crc kubenswrapper[4721]: I1202 12:10:37.082714 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"088dff10742ea220c32ab9d82c56f0c66f001abfc1d5937f1374200d556f515c"} Dec 02 12:10:37 crc kubenswrapper[4721]: I1202 12:10:37.082756 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"f513e4ce4313743e980239aeb5eefca6c4b1e395a3c36451bf6d2161c7366c64"} Dec 02 12:10:37 crc kubenswrapper[4721]: I1202 12:10:37.082768 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"802178876a4950d3ce589b0214d46ed8a710139f5d544d3417bb944c86774e96"} Dec 02 12:10:37 crc kubenswrapper[4721]: I1202 12:10:37.082777 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"03c4f2324972362948f96ee55e110ab525ab6abfa434439a0e4df3e0a866ff71"} Dec 02 12:10:37 crc kubenswrapper[4721]: I1202 12:10:37.082789 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"995b2f3e6c864718f742047ed355ec10f9b204fdfff5a65859243e596dc1f883"} Dec 02 12:10:37 crc kubenswrapper[4721]: I1202 12:10:37.082797 4721 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"f855b21a8cf33758339f3717667d47c53ff48cef0dd22280502c05c02e3f259c"} Dec 02 12:10:37 crc kubenswrapper[4721]: I1202 12:10:37.084608 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" event={"ID":"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139","Type":"ContainerStarted","Data":"8c534a6b39d953cdb87279019d88bfb7307b6abe29af4a7dd5ea0994849fb449"} Dec 02 12:10:37 crc kubenswrapper[4721]: I1202 12:10:37.084636 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" event={"ID":"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139","Type":"ContainerStarted","Data":"eb513383c35f93985f79c8e28da26d8bb62f8df1562a6399d96a36d86b5de49f"} Dec 02 12:10:37 crc kubenswrapper[4721]: I1202 12:10:37.089696 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"ce28b6e9f05405a0b6043c588b3af01293215c9b71a887847201c5e25e84c087"} Dec 02 12:10:37 crc kubenswrapper[4721]: I1202 12:10:37.089730 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"a87b177f0540cc3e19c31eac0f39199242540d018e4084a050ad66d6f05b5476"} Dec 02 12:10:37 crc kubenswrapper[4721]: I1202 12:10:37.089743 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"24ba6660ddc36471c262bd11a52bbe8a3bad428befacec5ddf02eedc95c3e6d0"} Dec 02 12:10:37 crc kubenswrapper[4721]: I1202 12:10:37.089753 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"c98f9453ed9e75ea4836cdf8e6b5a3244e10b1a2aa391d76ff2f94bb0544c315"} Dec 02 12:10:37 crc kubenswrapper[4721]: I1202 12:10:37.089763 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"03e2c64a72b3eb763aaf870d3ec53052db9bea2a45adc28cd17ab12c86a882e0"} Dec 02 12:10:37 crc kubenswrapper[4721]: I1202 12:10:37.104888 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" podStartSLOduration=2.104870158 podStartE2EDuration="2.104870158s" podCreationTimestamp="2025-12-02 12:10:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:10:37.09811025 +0000 UTC m=+1166.836660599" watchObservedRunningTime="2025-12-02 12:10:37.104870158 +0000 UTC m=+1166.843420507" Dec 02 12:10:38 crc kubenswrapper[4721]: I1202 12:10:38.101029 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"a8f531ad5245090f9458ffaf6b15767b2cf1e267e55587129ff8745980b7dc83"} Dec 02 12:10:38 crc kubenswrapper[4721]: I1202 12:10:38.101362 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" 
event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"40af61249b0b7b921b5139f31ceff4851448a0fad0e7bd3973356446ae2b0851"} Dec 02 12:10:38 crc kubenswrapper[4721]: I1202 12:10:38.101375 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"821584e74f43e336348ba071bdabdfc3e5d8e843b62a6261110261e0b92582a8"} Dec 02 12:10:38 crc kubenswrapper[4721]: I1202 12:10:38.101384 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"e83dc368eb5822bf3f3d2476ee5ed27744fdcb5dcfd00abcf7a01c1b0ae2e44d"} Dec 02 12:10:38 crc kubenswrapper[4721]: I1202 12:10:38.101394 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"c9ad5b423e11c9c124a474d325df92864a94a39e95c34bdf81c15770a2e3b3ff"} Dec 02 12:10:38 crc kubenswrapper[4721]: I1202 12:10:38.109194 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"33b1828595b8c80890eae52dfcd95d97abd22b8b0c3f51c5b0f3914c5c94004e"} Dec 02 12:10:38 crc kubenswrapper[4721]: I1202 12:10:38.109249 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"399113c68a08ec5da56d1d064dffcf8274965c542e9200e84cddcda9cbbf4424"} Dec 02 12:10:38 crc kubenswrapper[4721]: I1202 12:10:38.109263 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"b209ace06b46ae1736b7777c480e2ebbec7d2d80b33606460a3e42f6eb9e8b63"} Dec 02 12:10:38 crc kubenswrapper[4721]: I1202 12:10:38.109277 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"e232483cb243215b722c6f0bfb3531c2d52e026ab0148df621f5a505cc098d6c"} Dec 02 12:10:38 crc kubenswrapper[4721]: I1202 12:10:38.109289 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"18b6e4a6f8e7df1af99a0dd9ecd2a89682cf778ff4a5070ecad697cafcc9de71"} Dec 02 12:10:38 crc kubenswrapper[4721]: I1202 12:10:38.109303 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"eac8db7656343e113eab7fa319706f4aa7e3ffd7817df882e1564f9bd3a0f1cf"} Dec 02 12:10:38 crc kubenswrapper[4721]: I1202 12:10:38.109313 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"6a14acfad3a520fab2c978ed50126f1f07a27f4477cf6fce3e94912e0a93ef5b"} Dec 02 12:10:39 crc kubenswrapper[4721]: I1202 12:10:39.125145 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" 
event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"bca906d1cc3affbe17a28df28f0505ed399d602c1464a98f21b5efa1f72368a3"} Dec 02 12:10:39 crc kubenswrapper[4721]: I1202 12:10:39.125677 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"fdace05da094705c18217f06a7a51510f7e8b78478183bbe68593716361576c1"} Dec 02 12:10:39 crc kubenswrapper[4721]: I1202 12:10:39.125693 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"48d0a3ea165bcf7be03ce424410c59c646b11008f505aedca257771e05d47d7b"} Dec 02 12:10:39 crc kubenswrapper[4721]: I1202 12:10:39.125711 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"e204416ba36c46cda9a223da1214bf8edf260cb4e5c3045aa5a803e1a0724a6c"} Dec 02 12:10:39 crc kubenswrapper[4721]: I1202 12:10:39.125725 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerStarted","Data":"ce6811fa26efd6c19e05e8fbedc6f1bba94e07d1697b8654d725090db0856174"} Dec 02 12:10:39 crc kubenswrapper[4721]: I1202 12:10:39.131629 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"d9453615c91a9028a3fa90cd00b70ad0567371011d6702dbb05879e5b2901d09"} Dec 02 12:10:39 crc kubenswrapper[4721]: I1202 12:10:39.131693 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"7a3d988b6f2d9267bde321ead3e578f3e1b5cc60aa115a5283457bd419e54df0"} Dec 02 12:10:39 crc kubenswrapper[4721]: I1202 12:10:39.131712 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"79048ad547310d151b543ca2c4d92123297c5d7450460d18f72db7d8fafb3fac"} Dec 02 12:10:39 crc kubenswrapper[4721]: I1202 12:10:39.131725 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerStarted","Data":"ff56691d3a9c1f98185ec467975a12a9eb5195656a43f650c14f5027db84c79c"} Dec 02 12:10:39 crc kubenswrapper[4721]: I1202 12:10:39.165069 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-1" podStartSLOduration=5.165051452 podStartE2EDuration="5.165051452s" podCreationTimestamp="2025-12-02 12:10:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:10:39.16048526 +0000 UTC m=+1168.899035609" watchObservedRunningTime="2025-12-02 12:10:39.165051452 +0000 UTC m=+1168.903601801" Dec 02 12:10:39 crc kubenswrapper[4721]: I1202 12:10:39.203949 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-2" podStartSLOduration=5.203930028 podStartE2EDuration="5.203930028s" podCreationTimestamp="2025-12-02 12:10:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:10:39.198212032 +0000 UTC m=+1168.936762381" watchObservedRunningTime="2025-12-02 12:10:39.203930028 +0000 UTC m=+1168.942480377" Dec 02 12:10:45 crc kubenswrapper[4721]: I1202 12:10:45.186573 4721 generic.go:334] "Generic (PLEG): container finished" podID="2b1bfb1c-aa60-4f1c-a1e9-007c2118d139" containerID="8c534a6b39d953cdb87279019d88bfb7307b6abe29af4a7dd5ea0994849fb449" exitCode=0 Dec 02 12:10:45 crc kubenswrapper[4721]: I1202 12:10:45.186669 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" event={"ID":"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139","Type":"ContainerDied","Data":"8c534a6b39d953cdb87279019d88bfb7307b6abe29af4a7dd5ea0994849fb449"} Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.445946 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.545167 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-dispersionconf\") pod \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.545293 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-ring-data-devices\") pod \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.545447 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-etc-swift\") pod \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.545472 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s94b5\" (UniqueName: \"kubernetes.io/projected/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-kube-api-access-s94b5\") pod \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.545513 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-swiftconf\") pod \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.545567 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-scripts\") pod \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\" (UID: \"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139\") " Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.546063 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "2b1bfb1c-aa60-4f1c-a1e9-007c2118d139" (UID: "2b1bfb1c-aa60-4f1c-a1e9-007c2118d139"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.546332 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2b1bfb1c-aa60-4f1c-a1e9-007c2118d139" (UID: "2b1bfb1c-aa60-4f1c-a1e9-007c2118d139"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.553299 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-kube-api-access-s94b5" (OuterVolumeSpecName: "kube-api-access-s94b5") pod "2b1bfb1c-aa60-4f1c-a1e9-007c2118d139" (UID: "2b1bfb1c-aa60-4f1c-a1e9-007c2118d139"). InnerVolumeSpecName "kube-api-access-s94b5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.565993 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-scripts" (OuterVolumeSpecName: "scripts") pod "2b1bfb1c-aa60-4f1c-a1e9-007c2118d139" (UID: "2b1bfb1c-aa60-4f1c-a1e9-007c2118d139"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.567820 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "2b1bfb1c-aa60-4f1c-a1e9-007c2118d139" (UID: "2b1bfb1c-aa60-4f1c-a1e9-007c2118d139"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.570381 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "2b1bfb1c-aa60-4f1c-a1e9-007c2118d139" (UID: "2b1bfb1c-aa60-4f1c-a1e9-007c2118d139"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.647189 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.647235 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.647245 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s94b5\" (UniqueName: \"kubernetes.io/projected/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-kube-api-access-s94b5\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.647257 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.647264 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:46 crc kubenswrapper[4721]: I1202 12:10:46.647279 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.205110 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" event={"ID":"2b1bfb1c-aa60-4f1c-a1e9-007c2118d139","Type":"ContainerDied","Data":"eb513383c35f93985f79c8e28da26d8bb62f8df1562a6399d96a36d86b5de49f"} Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.205154 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb513383c35f93985f79c8e28da26d8bb62f8df1562a6399d96a36d86b5de49f" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.205450 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-p5vcf" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.491387 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd"] Dec 02 12:10:47 crc kubenswrapper[4721]: E1202 12:10:47.491838 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b1bfb1c-aa60-4f1c-a1e9-007c2118d139" containerName="swift-ring-rebalance" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.491859 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b1bfb1c-aa60-4f1c-a1e9-007c2118d139" containerName="swift-ring-rebalance" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.492077 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b1bfb1c-aa60-4f1c-a1e9-007c2118d139" containerName="swift-ring-rebalance" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.495738 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.498801 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.498898 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.524150 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd"] Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.558237 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2398b904-d5c9-4622-a01c-6add8997b0e7-swiftconf\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.558392 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2398b904-d5c9-4622-a01c-6add8997b0e7-ring-data-devices\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.558420 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f675s\" (UniqueName: \"kubernetes.io/projected/2398b904-d5c9-4622-a01c-6add8997b0e7-kube-api-access-f675s\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.558461 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2398b904-d5c9-4622-a01c-6add8997b0e7-scripts\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.558492 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2398b904-d5c9-4622-a01c-6add8997b0e7-dispersionconf\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.558518 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2398b904-d5c9-4622-a01c-6add8997b0e7-etc-swift\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.659256 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2398b904-d5c9-4622-a01c-6add8997b0e7-swiftconf\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc 
kubenswrapper[4721]: I1202 12:10:47.659374 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2398b904-d5c9-4622-a01c-6add8997b0e7-ring-data-devices\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.659395 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f675s\" (UniqueName: \"kubernetes.io/projected/2398b904-d5c9-4622-a01c-6add8997b0e7-kube-api-access-f675s\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.659427 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2398b904-d5c9-4622-a01c-6add8997b0e7-scripts\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.659450 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2398b904-d5c9-4622-a01c-6add8997b0e7-dispersionconf\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.659470 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2398b904-d5c9-4622-a01c-6add8997b0e7-etc-swift\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.660033 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2398b904-d5c9-4622-a01c-6add8997b0e7-etc-swift\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.660243 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2398b904-d5c9-4622-a01c-6add8997b0e7-ring-data-devices\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.660258 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2398b904-d5c9-4622-a01c-6add8997b0e7-scripts\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.663779 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2398b904-d5c9-4622-a01c-6add8997b0e7-swiftconf\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" 
Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.666718 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2398b904-d5c9-4622-a01c-6add8997b0e7-dispersionconf\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.677694 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f675s\" (UniqueName: \"kubernetes.io/projected/2398b904-d5c9-4622-a01c-6add8997b0e7-kube-api-access-f675s\") pod \"swift-ring-rebalance-debug-lfzxd\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:47 crc kubenswrapper[4721]: I1202 12:10:47.814228 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:48 crc kubenswrapper[4721]: I1202 12:10:48.268811 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd"] Dec 02 12:10:49 crc kubenswrapper[4721]: I1202 12:10:49.222766 4721 generic.go:334] "Generic (PLEG): container finished" podID="2398b904-d5c9-4622-a01c-6add8997b0e7" containerID="5dbeb082471c17a6459bf2bd1dae789fd53364b5c5699d308a67956fd0ef2c1a" exitCode=0 Dec 02 12:10:49 crc kubenswrapper[4721]: I1202 12:10:49.222827 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" event={"ID":"2398b904-d5c9-4622-a01c-6add8997b0e7","Type":"ContainerDied","Data":"5dbeb082471c17a6459bf2bd1dae789fd53364b5c5699d308a67956fd0ef2c1a"} Dec 02 12:10:49 crc kubenswrapper[4721]: I1202 12:10:49.223182 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" event={"ID":"2398b904-d5c9-4622-a01c-6add8997b0e7","Type":"ContainerStarted","Data":"334e6a2b7d4645ff85dee8584e0edc18ef81155513fbe339c8a192264c949351"} Dec 02 12:10:49 crc kubenswrapper[4721]: I1202 12:10:49.268851 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd"] Dec 02 12:10:49 crc kubenswrapper[4721]: I1202 12:10:49.268910 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd"] Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.505070 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.611401 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2398b904-d5c9-4622-a01c-6add8997b0e7-swiftconf\") pod \"2398b904-d5c9-4622-a01c-6add8997b0e7\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.611443 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f675s\" (UniqueName: \"kubernetes.io/projected/2398b904-d5c9-4622-a01c-6add8997b0e7-kube-api-access-f675s\") pod \"2398b904-d5c9-4622-a01c-6add8997b0e7\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.611474 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2398b904-d5c9-4622-a01c-6add8997b0e7-etc-swift\") pod \"2398b904-d5c9-4622-a01c-6add8997b0e7\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.611506 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2398b904-d5c9-4622-a01c-6add8997b0e7-dispersionconf\") pod \"2398b904-d5c9-4622-a01c-6add8997b0e7\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.611550 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2398b904-d5c9-4622-a01c-6add8997b0e7-ring-data-devices\") pod \"2398b904-d5c9-4622-a01c-6add8997b0e7\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.611564 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2398b904-d5c9-4622-a01c-6add8997b0e7-scripts\") pod \"2398b904-d5c9-4622-a01c-6add8997b0e7\" (UID: \"2398b904-d5c9-4622-a01c-6add8997b0e7\") " Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.612933 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2398b904-d5c9-4622-a01c-6add8997b0e7-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "2398b904-d5c9-4622-a01c-6add8997b0e7" (UID: "2398b904-d5c9-4622-a01c-6add8997b0e7"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.613034 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2398b904-d5c9-4622-a01c-6add8997b0e7-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2398b904-d5c9-4622-a01c-6add8997b0e7" (UID: "2398b904-d5c9-4622-a01c-6add8997b0e7"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.616195 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2398b904-d5c9-4622-a01c-6add8997b0e7-kube-api-access-f675s" (OuterVolumeSpecName: "kube-api-access-f675s") pod "2398b904-d5c9-4622-a01c-6add8997b0e7" (UID: "2398b904-d5c9-4622-a01c-6add8997b0e7"). InnerVolumeSpecName "kube-api-access-f675s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.630004 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2398b904-d5c9-4622-a01c-6add8997b0e7-scripts" (OuterVolumeSpecName: "scripts") pod "2398b904-d5c9-4622-a01c-6add8997b0e7" (UID: "2398b904-d5c9-4622-a01c-6add8997b0e7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.638951 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2398b904-d5c9-4622-a01c-6add8997b0e7-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "2398b904-d5c9-4622-a01c-6add8997b0e7" (UID: "2398b904-d5c9-4622-a01c-6add8997b0e7"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.639001 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2398b904-d5c9-4622-a01c-6add8997b0e7-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "2398b904-d5c9-4622-a01c-6add8997b0e7" (UID: "2398b904-d5c9-4622-a01c-6add8997b0e7"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.712769 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2398b904-d5c9-4622-a01c-6add8997b0e7-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.712808 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2398b904-d5c9-4622-a01c-6add8997b0e7-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.712820 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2398b904-d5c9-4622-a01c-6add8997b0e7-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.712830 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f675s\" (UniqueName: \"kubernetes.io/projected/2398b904-d5c9-4622-a01c-6add8997b0e7-kube-api-access-f675s\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.712843 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2398b904-d5c9-4622-a01c-6add8997b0e7-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.712854 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2398b904-d5c9-4622-a01c-6add8997b0e7-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.721485 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb"] Dec 02 12:10:50 crc kubenswrapper[4721]: E1202 12:10:50.721801 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2398b904-d5c9-4622-a01c-6add8997b0e7" containerName="swift-ring-rebalance" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.721821 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="2398b904-d5c9-4622-a01c-6add8997b0e7" containerName="swift-ring-rebalance" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 
12:10:50.721971 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="2398b904-d5c9-4622-a01c-6add8997b0e7" containerName="swift-ring-rebalance" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.722484 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.736776 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb"] Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.814574 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/44cd03cd-76ac-42f0-925b-3cea33940cb2-scripts\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.814631 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/44cd03cd-76ac-42f0-925b-3cea33940cb2-swiftconf\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.814668 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/44cd03cd-76ac-42f0-925b-3cea33940cb2-ring-data-devices\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.814694 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/44cd03cd-76ac-42f0-925b-3cea33940cb2-etc-swift\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.815030 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hm26h\" (UniqueName: \"kubernetes.io/projected/44cd03cd-76ac-42f0-925b-3cea33940cb2-kube-api-access-hm26h\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.815245 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/44cd03cd-76ac-42f0-925b-3cea33940cb2-dispersionconf\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.849690 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2398b904-d5c9-4622-a01c-6add8997b0e7" path="/var/lib/kubelet/pods/2398b904-d5c9-4622-a01c-6add8997b0e7/volumes" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.917138 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hm26h\" (UniqueName: 
\"kubernetes.io/projected/44cd03cd-76ac-42f0-925b-3cea33940cb2-kube-api-access-hm26h\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.917284 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/44cd03cd-76ac-42f0-925b-3cea33940cb2-dispersionconf\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.917482 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/44cd03cd-76ac-42f0-925b-3cea33940cb2-scripts\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.917535 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/44cd03cd-76ac-42f0-925b-3cea33940cb2-swiftconf\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.917594 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/44cd03cd-76ac-42f0-925b-3cea33940cb2-ring-data-devices\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.917684 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/44cd03cd-76ac-42f0-925b-3cea33940cb2-etc-swift\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.918506 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/44cd03cd-76ac-42f0-925b-3cea33940cb2-etc-swift\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.919751 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/44cd03cd-76ac-42f0-925b-3cea33940cb2-ring-data-devices\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.920257 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/44cd03cd-76ac-42f0-925b-3cea33940cb2-scripts\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.922468 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" 
(UniqueName: \"kubernetes.io/secret/44cd03cd-76ac-42f0-925b-3cea33940cb2-swiftconf\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.923798 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/44cd03cd-76ac-42f0-925b-3cea33940cb2-dispersionconf\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:50 crc kubenswrapper[4721]: I1202 12:10:50.936232 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hm26h\" (UniqueName: \"kubernetes.io/projected/44cd03cd-76ac-42f0-925b-3cea33940cb2-kube-api-access-hm26h\") pod \"swift-ring-rebalance-debug-nzxbb\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:51 crc kubenswrapper[4721]: I1202 12:10:51.039612 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:51 crc kubenswrapper[4721]: I1202 12:10:51.245690 4721 scope.go:117] "RemoveContainer" containerID="5dbeb082471c17a6459bf2bd1dae789fd53364b5c5699d308a67956fd0ef2c1a" Dec 02 12:10:51 crc kubenswrapper[4721]: I1202 12:10:51.245720 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-lfzxd" Dec 02 12:10:51 crc kubenswrapper[4721]: I1202 12:10:51.513319 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb"] Dec 02 12:10:51 crc kubenswrapper[4721]: W1202 12:10:51.519263 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44cd03cd_76ac_42f0_925b_3cea33940cb2.slice/crio-07f3fd497f1a9d27a6f32268904d2feb7dd889c6163618a424aa5ce44caf0fdb WatchSource:0}: Error finding container 07f3fd497f1a9d27a6f32268904d2feb7dd889c6163618a424aa5ce44caf0fdb: Status 404 returned error can't find the container with id 07f3fd497f1a9d27a6f32268904d2feb7dd889c6163618a424aa5ce44caf0fdb Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.257179 4721 generic.go:334] "Generic (PLEG): container finished" podID="44cd03cd-76ac-42f0-925b-3cea33940cb2" containerID="a566917d8049e869f0b4dae0e086a2c85f6e583accf2756e83250aa459804176" exitCode=0 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.258001 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" event={"ID":"44cd03cd-76ac-42f0-925b-3cea33940cb2","Type":"ContainerDied","Data":"a566917d8049e869f0b4dae0e086a2c85f6e583accf2756e83250aa459804176"} Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.258031 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" event={"ID":"44cd03cd-76ac-42f0-925b-3cea33940cb2","Type":"ContainerStarted","Data":"07f3fd497f1a9d27a6f32268904d2feb7dd889c6163618a424aa5ce44caf0fdb"} Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.311899 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb"] Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.319252 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb"] Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419063 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419564 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-server" containerID="cri-o://c98f9453ed9e75ea4836cdf8e6b5a3244e10b1a2aa391d76ff2f94bb0544c315" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419682 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-updater" containerID="cri-o://18b6e4a6f8e7df1af99a0dd9ecd2a89682cf778ff4a5070ecad697cafcc9de71" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419679 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="swift-recon-cron" containerID="cri-o://d9453615c91a9028a3fa90cd00b70ad0567371011d6702dbb05879e5b2901d09" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419675 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-expirer" containerID="cri-o://79048ad547310d151b543ca2c4d92123297c5d7450460d18f72db7d8fafb3fac" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419727 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-auditor" containerID="cri-o://eac8db7656343e113eab7fa319706f4aa7e3ffd7817df882e1564f9bd3a0f1cf" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419740 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-auditor" containerID="cri-o://399113c68a08ec5da56d1d064dffcf8274965c542e9200e84cddcda9cbbf4424" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419760 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-replicator" containerID="cri-o://6a14acfad3a520fab2c978ed50126f1f07a27f4477cf6fce3e94912e0a93ef5b" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419789 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="rsync" containerID="cri-o://7a3d988b6f2d9267bde321ead3e578f3e1b5cc60aa115a5283457bd419e54df0" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419796 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-server" containerID="cri-o://33b1828595b8c80890eae52dfcd95d97abd22b8b0c3f51c5b0f3914c5c94004e" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419725 4721 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-updater" containerID="cri-o://ff56691d3a9c1f98185ec467975a12a9eb5195656a43f650c14f5027db84c79c" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419849 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-auditor" containerID="cri-o://a87b177f0540cc3e19c31eac0f39199242540d018e4084a050ad66d6f05b5476" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419839 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-reaper" containerID="cri-o://ce28b6e9f05405a0b6043c588b3af01293215c9b71a887847201c5e25e84c087" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419881 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-replicator" containerID="cri-o://24ba6660ddc36471c262bd11a52bbe8a3bad428befacec5ddf02eedc95c3e6d0" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419902 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-replicator" containerID="cri-o://b209ace06b46ae1736b7777c480e2ebbec7d2d80b33606460a3e42f6eb9e8b63" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.419668 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-server" containerID="cri-o://e232483cb243215b722c6f0bfb3531c2d52e026ab0148df621f5a505cc098d6c" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.432657 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.436070 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="account-server" containerID="cri-o://995b2f3e6c864718f742047ed355ec10f9b204fdfff5a65859243e596dc1f883" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.436415 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="swift-recon-cron" containerID="cri-o://bca906d1cc3affbe17a28df28f0505ed399d602c1464a98f21b5efa1f72368a3" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.436455 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="rsync" containerID="cri-o://fdace05da094705c18217f06a7a51510f7e8b78478183bbe68593716361576c1" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.436499 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-expirer" 
containerID="cri-o://48d0a3ea165bcf7be03ce424410c59c646b11008f505aedca257771e05d47d7b" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.436531 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-updater" containerID="cri-o://e204416ba36c46cda9a223da1214bf8edf260cb4e5c3045aa5a803e1a0724a6c" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.436560 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-auditor" containerID="cri-o://ce6811fa26efd6c19e05e8fbedc6f1bba94e07d1697b8654d725090db0856174" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.436588 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-replicator" containerID="cri-o://a8f531ad5245090f9458ffaf6b15767b2cf1e267e55587129ff8745980b7dc83" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.436626 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-server" containerID="cri-o://40af61249b0b7b921b5139f31ceff4851448a0fad0e7bd3973356446ae2b0851" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.436656 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-updater" containerID="cri-o://821584e74f43e336348ba071bdabdfc3e5d8e843b62a6261110261e0b92582a8" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.436685 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-auditor" containerID="cri-o://e83dc368eb5822bf3f3d2476ee5ed27744fdcb5dcfd00abcf7a01c1b0ae2e44d" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.436716 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-replicator" containerID="cri-o://c9ad5b423e11c9c124a474d325df92864a94a39e95c34bdf81c15770a2e3b3ff" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.436745 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-server" containerID="cri-o://088dff10742ea220c32ab9d82c56f0c66f001abfc1d5937f1374200d556f515c" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.436781 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="account-reaper" containerID="cri-o://f513e4ce4313743e980239aeb5eefca6c4b1e395a3c36451bf6d2161c7366c64" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.437005 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" 
containerName="account-auditor" containerID="cri-o://802178876a4950d3ce589b0214d46ed8a710139f5d544d3417bb944c86774e96" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.437049 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="account-replicator" containerID="cri-o://03c4f2324972362948f96ee55e110ab525ab6abfa434439a0e4df3e0a866ff71" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.453721 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.454354 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-server" containerID="cri-o://823ead0e1dba1a6808b1545c985f36a5a06fbfff4dd41b9505ef04739ce6c35b" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.454521 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-expirer" containerID="cri-o://1a58821aa2d18c78cf9ee3ebed4af2694174839aabf139e0e6f945a3a54dd270" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.454604 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-updater" containerID="cri-o://e5aa55bbc88958f81c0074e5ce8a912e3147aa864c5b2b850db9efb3ab41ddd6" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.454663 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-auditor" containerID="cri-o://270c4fb66daff1d8d57a60b18580156fc9694244d1f0de1bec2c8ac2f36f9907" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.454669 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="swift-recon-cron" containerID="cri-o://6fd69500bf05386473742672cfdfabd0ba5e53c6ea2849a034a010998107fd62" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.454726 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-replicator" containerID="cri-o://5467bc17f876398c85911f415554a36185c7dd84bbff64110bf3c279c8483200" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.454798 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-reaper" containerID="cri-o://099a21d19c8049b4e27934f6185161bf78b3d33f6e2df9eeb2d31c3c23a8f3fc" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.454842 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-updater" containerID="cri-o://65799d55d1b5c936b5df5cb2f6d3f6736f2488ece2c9a056c8dbfaa7346f84d6" gracePeriod=30 Dec 02 12:10:52 crc 
kubenswrapper[4721]: I1202 12:10:52.454831 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-auditor" containerID="cri-o://9e68c8145c46c51de310d82790fa92bfbcf3508bfe0b206a0a0f365566661343" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.454878 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-auditor" containerID="cri-o://b583c65dcf1e1667c262d27f780ae405536c139dc4ae684d0b25d3ecfaeedeb2" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.454914 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-replicator" containerID="cri-o://f176422a2d531c26166f96b401d351a6fe34b52eb8bece2163366d4beaefbff4" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.454949 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-server" containerID="cri-o://a92cbb3c16ac4018f46d76bdd233a690ebdb976f966bacbd6f10368289751f89" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.454788 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-server" containerID="cri-o://cd57944d6e44a8ede134d360793593fc774b90b4826e4d2196762778b5fc087c" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.454324 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="rsync" containerID="cri-o://1e07972f5499cd9722b51c64d003837027ad296aac2851843b87f31515436f89" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.455042 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-replicator" containerID="cri-o://f5a932dca5f2810a25a775136f518bba1a4e05d017c2f1ed37482e545105d6dd" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.461664 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-p5vcf"] Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.467068 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-p5vcf"] Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.498557 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z"] Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.498837 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" podUID="e1498f09-7529-4b3e-b2b8-a11b5a5a4624" containerName="proxy-httpd" containerID="cri-o://2a29f9fe2c6a43104ceaa969dc2f696866343069ca88299ea997c871d7ee689e" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.499160 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" 
podUID="e1498f09-7529-4b3e-b2b8-a11b5a5a4624" containerName="proxy-server" containerID="cri-o://47b26f0b8b19e903cbad514410be260867bb04d8bb7a11b867cec9ac05438844" gracePeriod=30 Dec 02 12:10:52 crc kubenswrapper[4721]: I1202 12:10:52.851829 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b1bfb1c-aa60-4f1c-a1e9-007c2118d139" path="/var/lib/kubelet/pods/2b1bfb1c-aa60-4f1c-a1e9-007c2118d139/volumes" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.265598 4721 generic.go:334] "Generic (PLEG): container finished" podID="e1498f09-7529-4b3e-b2b8-a11b5a5a4624" containerID="47b26f0b8b19e903cbad514410be260867bb04d8bb7a11b867cec9ac05438844" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.265913 4721 generic.go:334] "Generic (PLEG): container finished" podID="e1498f09-7529-4b3e-b2b8-a11b5a5a4624" containerID="2a29f9fe2c6a43104ceaa969dc2f696866343069ca88299ea997c871d7ee689e" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.265774 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" event={"ID":"e1498f09-7529-4b3e-b2b8-a11b5a5a4624","Type":"ContainerDied","Data":"47b26f0b8b19e903cbad514410be260867bb04d8bb7a11b867cec9ac05438844"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.265975 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" event={"ID":"e1498f09-7529-4b3e-b2b8-a11b5a5a4624","Type":"ContainerDied","Data":"2a29f9fe2c6a43104ceaa969dc2f696866343069ca88299ea997c871d7ee689e"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272111 4721 generic.go:334] "Generic (PLEG): container finished" podID="27eb4674-477b-4189-857a-d5a572e89277" containerID="1e07972f5499cd9722b51c64d003837027ad296aac2851843b87f31515436f89" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272136 4721 generic.go:334] "Generic (PLEG): container finished" podID="27eb4674-477b-4189-857a-d5a572e89277" containerID="1a58821aa2d18c78cf9ee3ebed4af2694174839aabf139e0e6f945a3a54dd270" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272148 4721 generic.go:334] "Generic (PLEG): container finished" podID="27eb4674-477b-4189-857a-d5a572e89277" containerID="e5aa55bbc88958f81c0074e5ce8a912e3147aa864c5b2b850db9efb3ab41ddd6" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272159 4721 generic.go:334] "Generic (PLEG): container finished" podID="27eb4674-477b-4189-857a-d5a572e89277" containerID="270c4fb66daff1d8d57a60b18580156fc9694244d1f0de1bec2c8ac2f36f9907" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272170 4721 generic.go:334] "Generic (PLEG): container finished" podID="27eb4674-477b-4189-857a-d5a572e89277" containerID="5467bc17f876398c85911f415554a36185c7dd84bbff64110bf3c279c8483200" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272178 4721 generic.go:334] "Generic (PLEG): container finished" podID="27eb4674-477b-4189-857a-d5a572e89277" containerID="cd57944d6e44a8ede134d360793593fc774b90b4826e4d2196762778b5fc087c" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272186 4721 generic.go:334] "Generic (PLEG): container finished" podID="27eb4674-477b-4189-857a-d5a572e89277" containerID="65799d55d1b5c936b5df5cb2f6d3f6736f2488ece2c9a056c8dbfaa7346f84d6" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272195 4721 generic.go:334] "Generic (PLEG): container finished" podID="27eb4674-477b-4189-857a-d5a572e89277" 
containerID="b583c65dcf1e1667c262d27f780ae405536c139dc4ae684d0b25d3ecfaeedeb2" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272203 4721 generic.go:334] "Generic (PLEG): container finished" podID="27eb4674-477b-4189-857a-d5a572e89277" containerID="f176422a2d531c26166f96b401d351a6fe34b52eb8bece2163366d4beaefbff4" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272211 4721 generic.go:334] "Generic (PLEG): container finished" podID="27eb4674-477b-4189-857a-d5a572e89277" containerID="a92cbb3c16ac4018f46d76bdd233a690ebdb976f966bacbd6f10368289751f89" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272219 4721 generic.go:334] "Generic (PLEG): container finished" podID="27eb4674-477b-4189-857a-d5a572e89277" containerID="099a21d19c8049b4e27934f6185161bf78b3d33f6e2df9eeb2d31c3c23a8f3fc" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272227 4721 generic.go:334] "Generic (PLEG): container finished" podID="27eb4674-477b-4189-857a-d5a572e89277" containerID="9e68c8145c46c51de310d82790fa92bfbcf3508bfe0b206a0a0f365566661343" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272234 4721 generic.go:334] "Generic (PLEG): container finished" podID="27eb4674-477b-4189-857a-d5a572e89277" containerID="f5a932dca5f2810a25a775136f518bba1a4e05d017c2f1ed37482e545105d6dd" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272241 4721 generic.go:334] "Generic (PLEG): container finished" podID="27eb4674-477b-4189-857a-d5a572e89277" containerID="823ead0e1dba1a6808b1545c985f36a5a06fbfff4dd41b9505ef04739ce6c35b" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272278 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"1e07972f5499cd9722b51c64d003837027ad296aac2851843b87f31515436f89"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272297 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"1a58821aa2d18c78cf9ee3ebed4af2694174839aabf139e0e6f945a3a54dd270"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272309 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"e5aa55bbc88958f81c0074e5ce8a912e3147aa864c5b2b850db9efb3ab41ddd6"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272320 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"270c4fb66daff1d8d57a60b18580156fc9694244d1f0de1bec2c8ac2f36f9907"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272330 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"5467bc17f876398c85911f415554a36185c7dd84bbff64110bf3c279c8483200"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272357 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"cd57944d6e44a8ede134d360793593fc774b90b4826e4d2196762778b5fc087c"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272370 4721 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"65799d55d1b5c936b5df5cb2f6d3f6736f2488ece2c9a056c8dbfaa7346f84d6"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272381 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"b583c65dcf1e1667c262d27f780ae405536c139dc4ae684d0b25d3ecfaeedeb2"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272392 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"f176422a2d531c26166f96b401d351a6fe34b52eb8bece2163366d4beaefbff4"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272405 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"a92cbb3c16ac4018f46d76bdd233a690ebdb976f966bacbd6f10368289751f89"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272417 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"099a21d19c8049b4e27934f6185161bf78b3d33f6e2df9eeb2d31c3c23a8f3fc"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272428 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"9e68c8145c46c51de310d82790fa92bfbcf3508bfe0b206a0a0f365566661343"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272439 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"f5a932dca5f2810a25a775136f518bba1a4e05d017c2f1ed37482e545105d6dd"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.272450 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"823ead0e1dba1a6808b1545c985f36a5a06fbfff4dd41b9505ef04739ce6c35b"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277087 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerID="fdace05da094705c18217f06a7a51510f7e8b78478183bbe68593716361576c1" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277104 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerID="48d0a3ea165bcf7be03ce424410c59c646b11008f505aedca257771e05d47d7b" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277111 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerID="e204416ba36c46cda9a223da1214bf8edf260cb4e5c3045aa5a803e1a0724a6c" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277117 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerID="ce6811fa26efd6c19e05e8fbedc6f1bba94e07d1697b8654d725090db0856174" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277124 4721 generic.go:334] "Generic (PLEG): container finished" 
podID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerID="a8f531ad5245090f9458ffaf6b15767b2cf1e267e55587129ff8745980b7dc83" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277130 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerID="40af61249b0b7b921b5139f31ceff4851448a0fad0e7bd3973356446ae2b0851" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277137 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerID="821584e74f43e336348ba071bdabdfc3e5d8e843b62a6261110261e0b92582a8" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277143 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerID="e83dc368eb5822bf3f3d2476ee5ed27744fdcb5dcfd00abcf7a01c1b0ae2e44d" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277149 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerID="c9ad5b423e11c9c124a474d325df92864a94a39e95c34bdf81c15770a2e3b3ff" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277155 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerID="088dff10742ea220c32ab9d82c56f0c66f001abfc1d5937f1374200d556f515c" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277162 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerID="f513e4ce4313743e980239aeb5eefca6c4b1e395a3c36451bf6d2161c7366c64" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277168 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerID="802178876a4950d3ce589b0214d46ed8a710139f5d544d3417bb944c86774e96" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277174 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerID="03c4f2324972362948f96ee55e110ab525ab6abfa434439a0e4df3e0a866ff71" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277181 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerID="995b2f3e6c864718f742047ed355ec10f9b204fdfff5a65859243e596dc1f883" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277207 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"fdace05da094705c18217f06a7a51510f7e8b78478183bbe68593716361576c1"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277220 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"48d0a3ea165bcf7be03ce424410c59c646b11008f505aedca257771e05d47d7b"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277230 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"e204416ba36c46cda9a223da1214bf8edf260cb4e5c3045aa5a803e1a0724a6c"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277238 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" 
event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"ce6811fa26efd6c19e05e8fbedc6f1bba94e07d1697b8654d725090db0856174"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277247 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"a8f531ad5245090f9458ffaf6b15767b2cf1e267e55587129ff8745980b7dc83"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277255 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"40af61249b0b7b921b5139f31ceff4851448a0fad0e7bd3973356446ae2b0851"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277264 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"821584e74f43e336348ba071bdabdfc3e5d8e843b62a6261110261e0b92582a8"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277272 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"e83dc368eb5822bf3f3d2476ee5ed27744fdcb5dcfd00abcf7a01c1b0ae2e44d"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277280 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"c9ad5b423e11c9c124a474d325df92864a94a39e95c34bdf81c15770a2e3b3ff"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277289 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"088dff10742ea220c32ab9d82c56f0c66f001abfc1d5937f1374200d556f515c"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277297 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"f513e4ce4313743e980239aeb5eefca6c4b1e395a3c36451bf6d2161c7366c64"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277317 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"802178876a4950d3ce589b0214d46ed8a710139f5d544d3417bb944c86774e96"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277325 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"03c4f2324972362948f96ee55e110ab525ab6abfa434439a0e4df3e0a866ff71"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.277334 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"995b2f3e6c864718f742047ed355ec10f9b204fdfff5a65859243e596dc1f883"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.282968 4721 generic.go:334] "Generic (PLEG): container finished" podID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerID="7a3d988b6f2d9267bde321ead3e578f3e1b5cc60aa115a5283457bd419e54df0" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.282990 4721 
generic.go:334] "Generic (PLEG): container finished" podID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerID="79048ad547310d151b543ca2c4d92123297c5d7450460d18f72db7d8fafb3fac" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.282998 4721 generic.go:334] "Generic (PLEG): container finished" podID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerID="ff56691d3a9c1f98185ec467975a12a9eb5195656a43f650c14f5027db84c79c" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283004 4721 generic.go:334] "Generic (PLEG): container finished" podID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerID="399113c68a08ec5da56d1d064dffcf8274965c542e9200e84cddcda9cbbf4424" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283010 4721 generic.go:334] "Generic (PLEG): container finished" podID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerID="b209ace06b46ae1736b7777c480e2ebbec7d2d80b33606460a3e42f6eb9e8b63" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283017 4721 generic.go:334] "Generic (PLEG): container finished" podID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerID="e232483cb243215b722c6f0bfb3531c2d52e026ab0148df621f5a505cc098d6c" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283024 4721 generic.go:334] "Generic (PLEG): container finished" podID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerID="18b6e4a6f8e7df1af99a0dd9ecd2a89682cf778ff4a5070ecad697cafcc9de71" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283031 4721 generic.go:334] "Generic (PLEG): container finished" podID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerID="eac8db7656343e113eab7fa319706f4aa7e3ffd7817df882e1564f9bd3a0f1cf" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283039 4721 generic.go:334] "Generic (PLEG): container finished" podID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerID="6a14acfad3a520fab2c978ed50126f1f07a27f4477cf6fce3e94912e0a93ef5b" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283046 4721 generic.go:334] "Generic (PLEG): container finished" podID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerID="33b1828595b8c80890eae52dfcd95d97abd22b8b0c3f51c5b0f3914c5c94004e" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283053 4721 generic.go:334] "Generic (PLEG): container finished" podID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerID="ce28b6e9f05405a0b6043c588b3af01293215c9b71a887847201c5e25e84c087" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283059 4721 generic.go:334] "Generic (PLEG): container finished" podID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerID="a87b177f0540cc3e19c31eac0f39199242540d018e4084a050ad66d6f05b5476" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283065 4721 generic.go:334] "Generic (PLEG): container finished" podID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerID="24ba6660ddc36471c262bd11a52bbe8a3bad428befacec5ddf02eedc95c3e6d0" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283070 4721 generic.go:334] "Generic (PLEG): container finished" podID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerID="c98f9453ed9e75ea4836cdf8e6b5a3244e10b1a2aa391d76ff2f94bb0544c315" exitCode=0 Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283253 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" 
event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"7a3d988b6f2d9267bde321ead3e578f3e1b5cc60aa115a5283457bd419e54df0"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283269 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"79048ad547310d151b543ca2c4d92123297c5d7450460d18f72db7d8fafb3fac"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283279 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"ff56691d3a9c1f98185ec467975a12a9eb5195656a43f650c14f5027db84c79c"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283289 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"399113c68a08ec5da56d1d064dffcf8274965c542e9200e84cddcda9cbbf4424"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283297 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"b209ace06b46ae1736b7777c480e2ebbec7d2d80b33606460a3e42f6eb9e8b63"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283305 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"e232483cb243215b722c6f0bfb3531c2d52e026ab0148df621f5a505cc098d6c"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283314 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"18b6e4a6f8e7df1af99a0dd9ecd2a89682cf778ff4a5070ecad697cafcc9de71"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283322 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"eac8db7656343e113eab7fa319706f4aa7e3ffd7817df882e1564f9bd3a0f1cf"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283330 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"6a14acfad3a520fab2c978ed50126f1f07a27f4477cf6fce3e94912e0a93ef5b"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283353 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"33b1828595b8c80890eae52dfcd95d97abd22b8b0c3f51c5b0f3914c5c94004e"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283363 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"ce28b6e9f05405a0b6043c588b3af01293215c9b71a887847201c5e25e84c087"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283372 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"a87b177f0540cc3e19c31eac0f39199242540d018e4084a050ad66d6f05b5476"} Dec 02 12:10:53 crc 
kubenswrapper[4721]: I1202 12:10:53.283380 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"24ba6660ddc36471c262bd11a52bbe8a3bad428befacec5ddf02eedc95c3e6d0"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.283389 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"c98f9453ed9e75ea4836cdf8e6b5a3244e10b1a2aa391d76ff2f94bb0544c315"} Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.324747 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.369301 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nnzx\" (UniqueName: \"kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-kube-api-access-5nnzx\") pod \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.369459 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-run-httpd\") pod \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.369483 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-log-httpd\") pod \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.369513 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift\") pod \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.369561 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-config-data\") pod \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\" (UID: \"e1498f09-7529-4b3e-b2b8-a11b5a5a4624\") " Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.370759 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e1498f09-7529-4b3e-b2b8-a11b5a5a4624" (UID: "e1498f09-7529-4b3e-b2b8-a11b5a5a4624"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.371050 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e1498f09-7529-4b3e-b2b8-a11b5a5a4624" (UID: "e1498f09-7529-4b3e-b2b8-a11b5a5a4624"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.374996 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "e1498f09-7529-4b3e-b2b8-a11b5a5a4624" (UID: "e1498f09-7529-4b3e-b2b8-a11b5a5a4624"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.375187 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-kube-api-access-5nnzx" (OuterVolumeSpecName: "kube-api-access-5nnzx") pod "e1498f09-7529-4b3e-b2b8-a11b5a5a4624" (UID: "e1498f09-7529-4b3e-b2b8-a11b5a5a4624"). InnerVolumeSpecName "kube-api-access-5nnzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.418148 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-config-data" (OuterVolumeSpecName: "config-data") pod "e1498f09-7529-4b3e-b2b8-a11b5a5a4624" (UID: "e1498f09-7529-4b3e-b2b8-a11b5a5a4624"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.465275 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.471433 4721 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.471467 4721 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.471476 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.471490 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.471499 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nnzx\" (UniqueName: \"kubernetes.io/projected/e1498f09-7529-4b3e-b2b8-a11b5a5a4624-kube-api-access-5nnzx\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.572695 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/44cd03cd-76ac-42f0-925b-3cea33940cb2-dispersionconf\") pod \"44cd03cd-76ac-42f0-925b-3cea33940cb2\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.572820 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/44cd03cd-76ac-42f0-925b-3cea33940cb2-scripts\") pod \"44cd03cd-76ac-42f0-925b-3cea33940cb2\" (UID: 
\"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.572881 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hm26h\" (UniqueName: \"kubernetes.io/projected/44cd03cd-76ac-42f0-925b-3cea33940cb2-kube-api-access-hm26h\") pod \"44cd03cd-76ac-42f0-925b-3cea33940cb2\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.572919 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/44cd03cd-76ac-42f0-925b-3cea33940cb2-ring-data-devices\") pod \"44cd03cd-76ac-42f0-925b-3cea33940cb2\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.572937 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/44cd03cd-76ac-42f0-925b-3cea33940cb2-swiftconf\") pod \"44cd03cd-76ac-42f0-925b-3cea33940cb2\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.572968 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/44cd03cd-76ac-42f0-925b-3cea33940cb2-etc-swift\") pod \"44cd03cd-76ac-42f0-925b-3cea33940cb2\" (UID: \"44cd03cd-76ac-42f0-925b-3cea33940cb2\") " Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.573771 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44cd03cd-76ac-42f0-925b-3cea33940cb2-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "44cd03cd-76ac-42f0-925b-3cea33940cb2" (UID: "44cd03cd-76ac-42f0-925b-3cea33940cb2"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.573908 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44cd03cd-76ac-42f0-925b-3cea33940cb2-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "44cd03cd-76ac-42f0-925b-3cea33940cb2" (UID: "44cd03cd-76ac-42f0-925b-3cea33940cb2"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.575922 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44cd03cd-76ac-42f0-925b-3cea33940cb2-kube-api-access-hm26h" (OuterVolumeSpecName: "kube-api-access-hm26h") pod "44cd03cd-76ac-42f0-925b-3cea33940cb2" (UID: "44cd03cd-76ac-42f0-925b-3cea33940cb2"). InnerVolumeSpecName "kube-api-access-hm26h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.590043 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44cd03cd-76ac-42f0-925b-3cea33940cb2-scripts" (OuterVolumeSpecName: "scripts") pod "44cd03cd-76ac-42f0-925b-3cea33940cb2" (UID: "44cd03cd-76ac-42f0-925b-3cea33940cb2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.591692 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44cd03cd-76ac-42f0-925b-3cea33940cb2-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "44cd03cd-76ac-42f0-925b-3cea33940cb2" (UID: "44cd03cd-76ac-42f0-925b-3cea33940cb2"). 
InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.592072 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44cd03cd-76ac-42f0-925b-3cea33940cb2-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "44cd03cd-76ac-42f0-925b-3cea33940cb2" (UID: "44cd03cd-76ac-42f0-925b-3cea33940cb2"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.674574 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/44cd03cd-76ac-42f0-925b-3cea33940cb2-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.674912 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/44cd03cd-76ac-42f0-925b-3cea33940cb2-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.674935 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hm26h\" (UniqueName: \"kubernetes.io/projected/44cd03cd-76ac-42f0-925b-3cea33940cb2-kube-api-access-hm26h\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.674946 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/44cd03cd-76ac-42f0-925b-3cea33940cb2-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.674955 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/44cd03cd-76ac-42f0-925b-3cea33940cb2-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:53 crc kubenswrapper[4721]: I1202 12:10:53.674963 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/44cd03cd-76ac-42f0-925b-3cea33940cb2-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:10:54 crc kubenswrapper[4721]: I1202 12:10:54.290202 4721 scope.go:117] "RemoveContainer" containerID="a566917d8049e869f0b4dae0e086a2c85f6e583accf2756e83250aa459804176" Dec 02 12:10:54 crc kubenswrapper[4721]: I1202 12:10:54.291183 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-nzxbb" Dec 02 12:10:54 crc kubenswrapper[4721]: I1202 12:10:54.292521 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" event={"ID":"e1498f09-7529-4b3e-b2b8-a11b5a5a4624","Type":"ContainerDied","Data":"c5f17305cb463f56aafb997e5f36496b998542fca4261290d938a41420e547fb"} Dec 02 12:10:54 crc kubenswrapper[4721]: I1202 12:10:54.292605 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z" Dec 02 12:10:54 crc kubenswrapper[4721]: I1202 12:10:54.322563 4721 scope.go:117] "RemoveContainer" containerID="47b26f0b8b19e903cbad514410be260867bb04d8bb7a11b867cec9ac05438844" Dec 02 12:10:54 crc kubenswrapper[4721]: I1202 12:10:54.375314 4721 scope.go:117] "RemoveContainer" containerID="2a29f9fe2c6a43104ceaa969dc2f696866343069ca88299ea997c871d7ee689e" Dec 02 12:10:54 crc kubenswrapper[4721]: I1202 12:10:54.377633 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z"] Dec 02 12:10:54 crc kubenswrapper[4721]: I1202 12:10:54.387618 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-proxy-8b5dc4bcf-r7b9z"] Dec 02 12:10:54 crc kubenswrapper[4721]: I1202 12:10:54.850145 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44cd03cd-76ac-42f0-925b-3cea33940cb2" path="/var/lib/kubelet/pods/44cd03cd-76ac-42f0-925b-3cea33940cb2/volumes" Dec 02 12:10:54 crc kubenswrapper[4721]: I1202 12:10:54.851392 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1498f09-7529-4b3e-b2b8-a11b5a5a4624" path="/var/lib/kubelet/pods/e1498f09-7529-4b3e-b2b8-a11b5a5a4624/volumes" Dec 02 12:10:55 crc kubenswrapper[4721]: I1202 12:10:55.702461 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 12:10:55 crc kubenswrapper[4721]: I1202 12:10:55.702806 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.530139 4721 generic.go:334] "Generic (PLEG): container finished" podID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerID="d9453615c91a9028a3fa90cd00b70ad0567371011d6702dbb05879e5b2901d09" exitCode=137 Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.530327 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"d9453615c91a9028a3fa90cd00b70ad0567371011d6702dbb05879e5b2901d09"} Dec 02 12:11:22 crc kubenswrapper[4721]: E1202 12:11:22.677267 4721 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27eb4674_477b_4189_857a_d5a572e89277.slice/crio-conmon-6fd69500bf05386473742672cfdfabd0ba5e53c6ea2849a034a010998107fd62.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27eb4674_477b_4189_857a_d5a572e89277.slice/crio-6fd69500bf05386473742672cfdfabd0ba5e53c6ea2849a034a010998107fd62.scope\": RecentStats: unable to find data in memory cache]" Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.822755 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-2"
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.835917 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09e554d6-e3a5-437e-a153-3ed91fadb78d-etc-swift\") pod \"09e554d6-e3a5-437e-a153-3ed91fadb78d\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.835998 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/09e554d6-e3a5-437e-a153-3ed91fadb78d-lock\") pod \"09e554d6-e3a5-437e-a153-3ed91fadb78d\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.836028 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/09e554d6-e3a5-437e-a153-3ed91fadb78d-cache\") pod \"09e554d6-e3a5-437e-a153-3ed91fadb78d\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.836048 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldgqb\" (UniqueName: \"kubernetes.io/projected/09e554d6-e3a5-437e-a153-3ed91fadb78d-kube-api-access-ldgqb\") pod \"09e554d6-e3a5-437e-a153-3ed91fadb78d\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.836092 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"09e554d6-e3a5-437e-a153-3ed91fadb78d\" (UID: \"09e554d6-e3a5-437e-a153-3ed91fadb78d\") "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.837175 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09e554d6-e3a5-437e-a153-3ed91fadb78d-cache" (OuterVolumeSpecName: "cache") pod "09e554d6-e3a5-437e-a153-3ed91fadb78d" (UID: "09e554d6-e3a5-437e-a153-3ed91fadb78d"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.837491 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09e554d6-e3a5-437e-a153-3ed91fadb78d-lock" (OuterVolumeSpecName: "lock") pod "09e554d6-e3a5-437e-a153-3ed91fadb78d" (UID: "09e554d6-e3a5-437e-a153-3ed91fadb78d"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.841206 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09e554d6-e3a5-437e-a153-3ed91fadb78d-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "09e554d6-e3a5-437e-a153-3ed91fadb78d" (UID: "09e554d6-e3a5-437e-a153-3ed91fadb78d"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.841461 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09e554d6-e3a5-437e-a153-3ed91fadb78d-kube-api-access-ldgqb" (OuterVolumeSpecName: "kube-api-access-ldgqb") pod "09e554d6-e3a5-437e-a153-3ed91fadb78d" (UID: "09e554d6-e3a5-437e-a153-3ed91fadb78d"). InnerVolumeSpecName "kube-api-access-ldgqb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.845994 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "swift") pod "09e554d6-e3a5-437e-a153-3ed91fadb78d" (UID: "09e554d6-e3a5-437e-a153-3ed91fadb78d"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.896755 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-1"
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.898280 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.937961 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-lock\") pod \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.938080 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqnst\" (UniqueName: \"kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-kube-api-access-wqnst\") pod \"27eb4674-477b-4189-857a-d5a572e89277\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.938166 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift\") pod \"27eb4674-477b-4189-857a-d5a572e89277\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.938183 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-etc-swift\") pod \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.938232 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/27eb4674-477b-4189-857a-d5a572e89277-lock\") pod \"27eb4674-477b-4189-857a-d5a572e89277\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.938318 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/27eb4674-477b-4189-857a-d5a572e89277-cache\") pod \"27eb4674-477b-4189-857a-d5a572e89277\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.938339 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-lock" (OuterVolumeSpecName: "lock") pod "1cff80c4-dc01-4e68-9cb7-ae41a5f05973" (UID: "1cff80c4-dc01-4e68-9cb7-ae41a5f05973"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.938396 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.938749 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27eb4674-477b-4189-857a-d5a572e89277-lock" (OuterVolumeSpecName: "lock") pod "27eb4674-477b-4189-857a-d5a572e89277" (UID: "27eb4674-477b-4189-857a-d5a572e89277"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.938757 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"27eb4674-477b-4189-857a-d5a572e89277\" (UID: \"27eb4674-477b-4189-857a-d5a572e89277\") "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.938849 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-cache\") pod \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.938899 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrscg\" (UniqueName: \"kubernetes.io/projected/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-kube-api-access-rrscg\") pod \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\" (UID: \"1cff80c4-dc01-4e68-9cb7-ae41a5f05973\") "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.938935 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27eb4674-477b-4189-857a-d5a572e89277-cache" (OuterVolumeSpecName: "cache") pod "27eb4674-477b-4189-857a-d5a572e89277" (UID: "27eb4674-477b-4189-857a-d5a572e89277"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.939511 4721 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/09e554d6-e3a5-437e-a153-3ed91fadb78d-lock\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.939526 4721 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/27eb4674-477b-4189-857a-d5a572e89277-lock\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.939534 4721 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/09e554d6-e3a5-437e-a153-3ed91fadb78d-cache\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.939544 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldgqb\" (UniqueName: \"kubernetes.io/projected/09e554d6-e3a5-437e-a153-3ed91fadb78d-kube-api-access-ldgqb\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.939569 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" "
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.939578 4721 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/27eb4674-477b-4189-857a-d5a572e89277-cache\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.939586 4721 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-lock\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.939595 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/09e554d6-e3a5-437e-a153-3ed91fadb78d-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.941666 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-cache" (OuterVolumeSpecName: "cache") pod "1cff80c4-dc01-4e68-9cb7-ae41a5f05973" (UID: "1cff80c4-dc01-4e68-9cb7-ae41a5f05973"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.942018 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "27eb4674-477b-4189-857a-d5a572e89277" (UID: "27eb4674-477b-4189-857a-d5a572e89277"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.942066 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-kube-api-access-wqnst" (OuterVolumeSpecName: "kube-api-access-wqnst") pod "27eb4674-477b-4189-857a-d5a572e89277" (UID: "27eb4674-477b-4189-857a-d5a572e89277"). InnerVolumeSpecName "kube-api-access-wqnst". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.942156 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "swift") pod "1cff80c4-dc01-4e68-9cb7-ae41a5f05973" (UID: "1cff80c4-dc01-4e68-9cb7-ae41a5f05973"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.943653 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-kube-api-access-rrscg" (OuterVolumeSpecName: "kube-api-access-rrscg") pod "1cff80c4-dc01-4e68-9cb7-ae41a5f05973" (UID: "1cff80c4-dc01-4e68-9cb7-ae41a5f05973"). InnerVolumeSpecName "kube-api-access-rrscg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.944108 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "swift") pod "27eb4674-477b-4189-857a-d5a572e89277" (UID: "27eb4674-477b-4189-857a-d5a572e89277"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.947823 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "1cff80c4-dc01-4e68-9cb7-ae41a5f05973" (UID: "1cff80c4-dc01-4e68-9cb7-ae41a5f05973"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:11:22 crc kubenswrapper[4721]: I1202 12:11:22.953723 4721 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.040754 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqnst\" (UniqueName: \"kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-kube-api-access-wqnst\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.040786 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.040797 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/27eb4674-477b-4189-857a-d5a572e89277-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.040805 4721 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.040834 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" "
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.040847 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" "
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.040856 4721 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-cache\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.040866 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrscg\" (UniqueName: \"kubernetes.io/projected/1cff80c4-dc01-4e68-9cb7-ae41a5f05973-kube-api-access-rrscg\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.052207 4721 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.052297 4721 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.141825 4721 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.141852 4721 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.543843 4721 generic.go:334] "Generic (PLEG): container finished" podID="27eb4674-477b-4189-857a-d5a572e89277" containerID="6fd69500bf05386473742672cfdfabd0ba5e53c6ea2849a034a010998107fd62" exitCode=137
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.543900 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"6fd69500bf05386473742672cfdfabd0ba5e53c6ea2849a034a010998107fd62"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.543954 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.543974 4721 scope.go:117] "RemoveContainer" containerID="6fd69500bf05386473742672cfdfabd0ba5e53c6ea2849a034a010998107fd62"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.543956 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"27eb4674-477b-4189-857a-d5a572e89277","Type":"ContainerDied","Data":"d641dfcd596b13facaa863878e3a218dc353a29484572bc490d78fd6f022252b"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.552369 4721 generic.go:334] "Generic (PLEG): container finished" podID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerID="bca906d1cc3affbe17a28df28f0505ed399d602c1464a98f21b5efa1f72368a3" exitCode=137
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.552428 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"bca906d1cc3affbe17a28df28f0505ed399d602c1464a98f21b5efa1f72368a3"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.552454 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"1cff80c4-dc01-4e68-9cb7-ae41a5f05973","Type":"ContainerDied","Data":"f855b21a8cf33758339f3717667d47c53ff48cef0dd22280502c05c02e3f259c"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.552467 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"40af61249b0b7b921b5139f31ceff4851448a0fad0e7bd3973356446ae2b0851"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.552477 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"821584e74f43e336348ba071bdabdfc3e5d8e843b62a6261110261e0b92582a8"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.552482 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e83dc368eb5822bf3f3d2476ee5ed27744fdcb5dcfd00abcf7a01c1b0ae2e44d"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.552487 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c9ad5b423e11c9c124a474d325df92864a94a39e95c34bdf81c15770a2e3b3ff"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.552493 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"088dff10742ea220c32ab9d82c56f0c66f001abfc1d5937f1374200d556f515c"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.552498 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f513e4ce4313743e980239aeb5eefca6c4b1e395a3c36451bf6d2161c7366c64"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.552504 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"802178876a4950d3ce589b0214d46ed8a710139f5d544d3417bb944c86774e96"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.552509 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03c4f2324972362948f96ee55e110ab525ab6abfa434439a0e4df3e0a866ff71"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.552516 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"995b2f3e6c864718f742047ed355ec10f9b204fdfff5a65859243e596dc1f883"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.553412 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-1"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566034 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"09e554d6-e3a5-437e-a153-3ed91fadb78d","Type":"ContainerDied","Data":"03e2c64a72b3eb763aaf870d3ec53052db9bea2a45adc28cd17ab12c86a882e0"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566072 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d9453615c91a9028a3fa90cd00b70ad0567371011d6702dbb05879e5b2901d09"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566083 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7a3d988b6f2d9267bde321ead3e578f3e1b5cc60aa115a5283457bd419e54df0"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566090 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"79048ad547310d151b543ca2c4d92123297c5d7450460d18f72db7d8fafb3fac"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566095 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff56691d3a9c1f98185ec467975a12a9eb5195656a43f650c14f5027db84c79c"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566100 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"399113c68a08ec5da56d1d064dffcf8274965c542e9200e84cddcda9cbbf4424"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566106 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b209ace06b46ae1736b7777c480e2ebbec7d2d80b33606460a3e42f6eb9e8b63"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566111 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e232483cb243215b722c6f0bfb3531c2d52e026ab0148df621f5a505cc098d6c"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566115 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"18b6e4a6f8e7df1af99a0dd9ecd2a89682cf778ff4a5070ecad697cafcc9de71"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566120 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eac8db7656343e113eab7fa319706f4aa7e3ffd7817df882e1564f9bd3a0f1cf"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566125 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6a14acfad3a520fab2c978ed50126f1f07a27f4477cf6fce3e94912e0a93ef5b"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566130 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"33b1828595b8c80890eae52dfcd95d97abd22b8b0c3f51c5b0f3914c5c94004e"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566135 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ce28b6e9f05405a0b6043c588b3af01293215c9b71a887847201c5e25e84c087"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566139 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a87b177f0540cc3e19c31eac0f39199242540d018e4084a050ad66d6f05b5476"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566144 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"24ba6660ddc36471c262bd11a52bbe8a3bad428befacec5ddf02eedc95c3e6d0"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566148 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c98f9453ed9e75ea4836cdf8e6b5a3244e10b1a2aa391d76ff2f94bb0544c315"}
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.566247 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.584252 4721 scope.go:117] "RemoveContainer" containerID="1e07972f5499cd9722b51c64d003837027ad296aac2851843b87f31515436f89"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.586353 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.592975 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.621147 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-1"]
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.624786 4721 scope.go:117] "RemoveContainer" containerID="1a58821aa2d18c78cf9ee3ebed4af2694174839aabf139e0e6f945a3a54dd270"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.627842 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-1"]
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.632761 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-2"]
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.636775 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-2"]
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.639181 4721 scope.go:117] "RemoveContainer" containerID="e5aa55bbc88958f81c0074e5ce8a912e3147aa864c5b2b850db9efb3ab41ddd6"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.653422 4721 scope.go:117] "RemoveContainer" containerID="270c4fb66daff1d8d57a60b18580156fc9694244d1f0de1bec2c8ac2f36f9907"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.666603 4721 scope.go:117] "RemoveContainer" containerID="5467bc17f876398c85911f415554a36185c7dd84bbff64110bf3c279c8483200"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.680126 4721 scope.go:117] "RemoveContainer" containerID="cd57944d6e44a8ede134d360793593fc774b90b4826e4d2196762778b5fc087c"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.693154 4721 scope.go:117] "RemoveContainer" containerID="65799d55d1b5c936b5df5cb2f6d3f6736f2488ece2c9a056c8dbfaa7346f84d6"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.707309 4721 scope.go:117] "RemoveContainer" containerID="b583c65dcf1e1667c262d27f780ae405536c139dc4ae684d0b25d3ecfaeedeb2"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.725321 4721 scope.go:117] "RemoveContainer" containerID="f176422a2d531c26166f96b401d351a6fe34b52eb8bece2163366d4beaefbff4"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.737984 4721 scope.go:117] "RemoveContainer" containerID="a92cbb3c16ac4018f46d76bdd233a690ebdb976f966bacbd6f10368289751f89"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.762655 4721 scope.go:117] "RemoveContainer" containerID="099a21d19c8049b4e27934f6185161bf78b3d33f6e2df9eeb2d31c3c23a8f3fc"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.779281 4721 scope.go:117] "RemoveContainer" containerID="9e68c8145c46c51de310d82790fa92bfbcf3508bfe0b206a0a0f365566661343"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.794237 4721 scope.go:117] "RemoveContainer" containerID="f5a932dca5f2810a25a775136f518bba1a4e05d017c2f1ed37482e545105d6dd"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.813655 4721 scope.go:117] "RemoveContainer" containerID="823ead0e1dba1a6808b1545c985f36a5a06fbfff4dd41b9505ef04739ce6c35b"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.827894 4721 scope.go:117] "RemoveContainer" containerID="6fd69500bf05386473742672cfdfabd0ba5e53c6ea2849a034a010998107fd62"
Dec 02 12:11:23 crc kubenswrapper[4721]: E1202 12:11:23.828228 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fd69500bf05386473742672cfdfabd0ba5e53c6ea2849a034a010998107fd62\": container with ID starting with 6fd69500bf05386473742672cfdfabd0ba5e53c6ea2849a034a010998107fd62 not found: ID does not exist" containerID="6fd69500bf05386473742672cfdfabd0ba5e53c6ea2849a034a010998107fd62"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.828256 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fd69500bf05386473742672cfdfabd0ba5e53c6ea2849a034a010998107fd62"} err="failed to get container status \"6fd69500bf05386473742672cfdfabd0ba5e53c6ea2849a034a010998107fd62\": rpc error: code = NotFound desc = could not find container \"6fd69500bf05386473742672cfdfabd0ba5e53c6ea2849a034a010998107fd62\": container with ID starting with 6fd69500bf05386473742672cfdfabd0ba5e53c6ea2849a034a010998107fd62 not found: ID does not exist"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.828275 4721 scope.go:117] "RemoveContainer" containerID="1e07972f5499cd9722b51c64d003837027ad296aac2851843b87f31515436f89"
Dec 02 12:11:23 crc kubenswrapper[4721]: E1202 12:11:23.828502 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e07972f5499cd9722b51c64d003837027ad296aac2851843b87f31515436f89\": container with ID starting with 1e07972f5499cd9722b51c64d003837027ad296aac2851843b87f31515436f89 not found: ID does not exist" containerID="1e07972f5499cd9722b51c64d003837027ad296aac2851843b87f31515436f89"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.828529 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e07972f5499cd9722b51c64d003837027ad296aac2851843b87f31515436f89"} err="failed to get container status \"1e07972f5499cd9722b51c64d003837027ad296aac2851843b87f31515436f89\": rpc error: code = NotFound desc = could not find container \"1e07972f5499cd9722b51c64d003837027ad296aac2851843b87f31515436f89\": container with ID starting with 1e07972f5499cd9722b51c64d003837027ad296aac2851843b87f31515436f89 not found: ID does not exist"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.828543 4721 scope.go:117] "RemoveContainer" containerID="1a58821aa2d18c78cf9ee3ebed4af2694174839aabf139e0e6f945a3a54dd270"
Dec 02 12:11:23 crc kubenswrapper[4721]: E1202 12:11:23.828764 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a58821aa2d18c78cf9ee3ebed4af2694174839aabf139e0e6f945a3a54dd270\": container with ID starting with 1a58821aa2d18c78cf9ee3ebed4af2694174839aabf139e0e6f945a3a54dd270 not found: ID does not exist" containerID="1a58821aa2d18c78cf9ee3ebed4af2694174839aabf139e0e6f945a3a54dd270"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.828794 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a58821aa2d18c78cf9ee3ebed4af2694174839aabf139e0e6f945a3a54dd270"} err="failed to get container status \"1a58821aa2d18c78cf9ee3ebed4af2694174839aabf139e0e6f945a3a54dd270\": rpc error: code = NotFound desc = could not find container \"1a58821aa2d18c78cf9ee3ebed4af2694174839aabf139e0e6f945a3a54dd270\": container with ID starting with 1a58821aa2d18c78cf9ee3ebed4af2694174839aabf139e0e6f945a3a54dd270 not found: ID does not exist"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.828810 4721 scope.go:117] "RemoveContainer" containerID="e5aa55bbc88958f81c0074e5ce8a912e3147aa864c5b2b850db9efb3ab41ddd6"
Dec 02 12:11:23 crc kubenswrapper[4721]: E1202 12:11:23.828996 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5aa55bbc88958f81c0074e5ce8a912e3147aa864c5b2b850db9efb3ab41ddd6\": container with ID starting with e5aa55bbc88958f81c0074e5ce8a912e3147aa864c5b2b850db9efb3ab41ddd6 not found: ID does not exist" containerID="e5aa55bbc88958f81c0074e5ce8a912e3147aa864c5b2b850db9efb3ab41ddd6"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.829021 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5aa55bbc88958f81c0074e5ce8a912e3147aa864c5b2b850db9efb3ab41ddd6"} err="failed to get container status \"e5aa55bbc88958f81c0074e5ce8a912e3147aa864c5b2b850db9efb3ab41ddd6\": rpc error: code = NotFound desc = could not find container \"e5aa55bbc88958f81c0074e5ce8a912e3147aa864c5b2b850db9efb3ab41ddd6\": container with ID starting with e5aa55bbc88958f81c0074e5ce8a912e3147aa864c5b2b850db9efb3ab41ddd6 not found: ID does not exist"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.829037 4721 scope.go:117] "RemoveContainer" containerID="270c4fb66daff1d8d57a60b18580156fc9694244d1f0de1bec2c8ac2f36f9907"
Dec 02 12:11:23 crc kubenswrapper[4721]: E1202 12:11:23.829466 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"270c4fb66daff1d8d57a60b18580156fc9694244d1f0de1bec2c8ac2f36f9907\": container with ID starting with 270c4fb66daff1d8d57a60b18580156fc9694244d1f0de1bec2c8ac2f36f9907 not found: ID does not exist" containerID="270c4fb66daff1d8d57a60b18580156fc9694244d1f0de1bec2c8ac2f36f9907"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.829492 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"270c4fb66daff1d8d57a60b18580156fc9694244d1f0de1bec2c8ac2f36f9907"} err="failed to get container status \"270c4fb66daff1d8d57a60b18580156fc9694244d1f0de1bec2c8ac2f36f9907\": rpc error: code = NotFound desc = could not find container \"270c4fb66daff1d8d57a60b18580156fc9694244d1f0de1bec2c8ac2f36f9907\": container with ID starting with 270c4fb66daff1d8d57a60b18580156fc9694244d1f0de1bec2c8ac2f36f9907 not found: ID does not exist"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.829509 4721 scope.go:117] "RemoveContainer" containerID="5467bc17f876398c85911f415554a36185c7dd84bbff64110bf3c279c8483200"
Dec 02 12:11:23 crc kubenswrapper[4721]: E1202 12:11:23.829705 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5467bc17f876398c85911f415554a36185c7dd84bbff64110bf3c279c8483200\": container with ID starting with 5467bc17f876398c85911f415554a36185c7dd84bbff64110bf3c279c8483200 not found: ID does not exist" containerID="5467bc17f876398c85911f415554a36185c7dd84bbff64110bf3c279c8483200"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.829729 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5467bc17f876398c85911f415554a36185c7dd84bbff64110bf3c279c8483200"} err="failed to get container status \"5467bc17f876398c85911f415554a36185c7dd84bbff64110bf3c279c8483200\": rpc error: code = NotFound desc = could not find container \"5467bc17f876398c85911f415554a36185c7dd84bbff64110bf3c279c8483200\": container with ID starting with 5467bc17f876398c85911f415554a36185c7dd84bbff64110bf3c279c8483200 not found: ID does not exist"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.829746 4721 scope.go:117] "RemoveContainer" containerID="cd57944d6e44a8ede134d360793593fc774b90b4826e4d2196762778b5fc087c"
Dec 02 12:11:23 crc kubenswrapper[4721]: E1202 12:11:23.829969 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd57944d6e44a8ede134d360793593fc774b90b4826e4d2196762778b5fc087c\": container with ID starting with cd57944d6e44a8ede134d360793593fc774b90b4826e4d2196762778b5fc087c not found: ID does not exist" containerID="cd57944d6e44a8ede134d360793593fc774b90b4826e4d2196762778b5fc087c"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.829995 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd57944d6e44a8ede134d360793593fc774b90b4826e4d2196762778b5fc087c"} err="failed to get container status \"cd57944d6e44a8ede134d360793593fc774b90b4826e4d2196762778b5fc087c\": rpc error: code = NotFound desc = could not find container \"cd57944d6e44a8ede134d360793593fc774b90b4826e4d2196762778b5fc087c\": container with ID starting with cd57944d6e44a8ede134d360793593fc774b90b4826e4d2196762778b5fc087c not found: ID does not exist"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.830014 4721 scope.go:117] "RemoveContainer" containerID="65799d55d1b5c936b5df5cb2f6d3f6736f2488ece2c9a056c8dbfaa7346f84d6"
Dec 02 12:11:23 crc kubenswrapper[4721]: E1202 12:11:23.830200 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65799d55d1b5c936b5df5cb2f6d3f6736f2488ece2c9a056c8dbfaa7346f84d6\": container with ID starting with 65799d55d1b5c936b5df5cb2f6d3f6736f2488ece2c9a056c8dbfaa7346f84d6 not found: ID does not exist" containerID="65799d55d1b5c936b5df5cb2f6d3f6736f2488ece2c9a056c8dbfaa7346f84d6"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.830225 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65799d55d1b5c936b5df5cb2f6d3f6736f2488ece2c9a056c8dbfaa7346f84d6"} err="failed to get container status \"65799d55d1b5c936b5df5cb2f6d3f6736f2488ece2c9a056c8dbfaa7346f84d6\": rpc error: code = NotFound desc = could not find container \"65799d55d1b5c936b5df5cb2f6d3f6736f2488ece2c9a056c8dbfaa7346f84d6\": container with ID starting with 65799d55d1b5c936b5df5cb2f6d3f6736f2488ece2c9a056c8dbfaa7346f84d6 not found: ID does not exist"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.830239 4721 scope.go:117] "RemoveContainer" containerID="b583c65dcf1e1667c262d27f780ae405536c139dc4ae684d0b25d3ecfaeedeb2"
Dec 02 12:11:23 crc kubenswrapper[4721]: E1202 12:11:23.830484 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b583c65dcf1e1667c262d27f780ae405536c139dc4ae684d0b25d3ecfaeedeb2\": container with ID starting with b583c65dcf1e1667c262d27f780ae405536c139dc4ae684d0b25d3ecfaeedeb2 not found: ID does not exist" containerID="b583c65dcf1e1667c262d27f780ae405536c139dc4ae684d0b25d3ecfaeedeb2"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.830508 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b583c65dcf1e1667c262d27f780ae405536c139dc4ae684d0b25d3ecfaeedeb2"} err="failed to get container status \"b583c65dcf1e1667c262d27f780ae405536c139dc4ae684d0b25d3ecfaeedeb2\": rpc error: code = NotFound desc = could not find container \"b583c65dcf1e1667c262d27f780ae405536c139dc4ae684d0b25d3ecfaeedeb2\": container with ID starting with b583c65dcf1e1667c262d27f780ae405536c139dc4ae684d0b25d3ecfaeedeb2 not found: ID does not exist"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.830523 4721 scope.go:117] "RemoveContainer" containerID="f176422a2d531c26166f96b401d351a6fe34b52eb8bece2163366d4beaefbff4"
Dec 02 12:11:23 crc kubenswrapper[4721]: E1202 12:11:23.830738 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f176422a2d531c26166f96b401d351a6fe34b52eb8bece2163366d4beaefbff4\": container with ID starting with f176422a2d531c26166f96b401d351a6fe34b52eb8bece2163366d4beaefbff4 not found: ID does not exist" containerID="f176422a2d531c26166f96b401d351a6fe34b52eb8bece2163366d4beaefbff4"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.830762 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f176422a2d531c26166f96b401d351a6fe34b52eb8bece2163366d4beaefbff4"} err="failed to get container status \"f176422a2d531c26166f96b401d351a6fe34b52eb8bece2163366d4beaefbff4\": rpc error: code = NotFound desc = could not find container \"f176422a2d531c26166f96b401d351a6fe34b52eb8bece2163366d4beaefbff4\": container with ID starting with f176422a2d531c26166f96b401d351a6fe34b52eb8bece2163366d4beaefbff4 not found: ID does not exist"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.830778 4721 scope.go:117] "RemoveContainer" containerID="a92cbb3c16ac4018f46d76bdd233a690ebdb976f966bacbd6f10368289751f89"
Dec 02 12:11:23 crc kubenswrapper[4721]: E1202 12:11:23.830974 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a92cbb3c16ac4018f46d76bdd233a690ebdb976f966bacbd6f10368289751f89\": container with ID starting with a92cbb3c16ac4018f46d76bdd233a690ebdb976f966bacbd6f10368289751f89 not found: ID does not exist" containerID="a92cbb3c16ac4018f46d76bdd233a690ebdb976f966bacbd6f10368289751f89"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.830997 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a92cbb3c16ac4018f46d76bdd233a690ebdb976f966bacbd6f10368289751f89"} err="failed to get container status \"a92cbb3c16ac4018f46d76bdd233a690ebdb976f966bacbd6f10368289751f89\": rpc error: code = NotFound desc = could not find container \"a92cbb3c16ac4018f46d76bdd233a690ebdb976f966bacbd6f10368289751f89\": container with ID starting with a92cbb3c16ac4018f46d76bdd233a690ebdb976f966bacbd6f10368289751f89 not found: ID does not exist"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.831013 4721 scope.go:117] "RemoveContainer" containerID="099a21d19c8049b4e27934f6185161bf78b3d33f6e2df9eeb2d31c3c23a8f3fc"
Dec 02 12:11:23 crc kubenswrapper[4721]: E1202 12:11:23.831220 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"099a21d19c8049b4e27934f6185161bf78b3d33f6e2df9eeb2d31c3c23a8f3fc\": container with ID starting with 099a21d19c8049b4e27934f6185161bf78b3d33f6e2df9eeb2d31c3c23a8f3fc not found: ID does not exist" containerID="099a21d19c8049b4e27934f6185161bf78b3d33f6e2df9eeb2d31c3c23a8f3fc"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.831245 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"099a21d19c8049b4e27934f6185161bf78b3d33f6e2df9eeb2d31c3c23a8f3fc"} err="failed to get container status \"099a21d19c8049b4e27934f6185161bf78b3d33f6e2df9eeb2d31c3c23a8f3fc\": rpc error: code = NotFound desc = could not find container \"099a21d19c8049b4e27934f6185161bf78b3d33f6e2df9eeb2d31c3c23a8f3fc\": container with ID starting with 099a21d19c8049b4e27934f6185161bf78b3d33f6e2df9eeb2d31c3c23a8f3fc not found: ID does not exist"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.831263 4721 scope.go:117] "RemoveContainer" containerID="9e68c8145c46c51de310d82790fa92bfbcf3508bfe0b206a0a0f365566661343"
Dec 02 12:11:23 crc kubenswrapper[4721]: E1202 12:11:23.831514 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e68c8145c46c51de310d82790fa92bfbcf3508bfe0b206a0a0f365566661343\": container with ID starting with 9e68c8145c46c51de310d82790fa92bfbcf3508bfe0b206a0a0f365566661343 not found: ID does not exist" containerID="9e68c8145c46c51de310d82790fa92bfbcf3508bfe0b206a0a0f365566661343"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.831540 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e68c8145c46c51de310d82790fa92bfbcf3508bfe0b206a0a0f365566661343"} err="failed to get container status \"9e68c8145c46c51de310d82790fa92bfbcf3508bfe0b206a0a0f365566661343\": rpc error: code = NotFound desc = could not find container \"9e68c8145c46c51de310d82790fa92bfbcf3508bfe0b206a0a0f365566661343\": container with ID starting with 9e68c8145c46c51de310d82790fa92bfbcf3508bfe0b206a0a0f365566661343 not found: ID does not exist"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.831555 4721 scope.go:117] "RemoveContainer" containerID="f5a932dca5f2810a25a775136f518bba1a4e05d017c2f1ed37482e545105d6dd"
Dec 02 12:11:23 crc kubenswrapper[4721]: E1202 12:11:23.831847 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5a932dca5f2810a25a775136f518bba1a4e05d017c2f1ed37482e545105d6dd\": container with ID starting with f5a932dca5f2810a25a775136f518bba1a4e05d017c2f1ed37482e545105d6dd not found: ID does not exist" containerID="f5a932dca5f2810a25a775136f518bba1a4e05d017c2f1ed37482e545105d6dd"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.831873 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5a932dca5f2810a25a775136f518bba1a4e05d017c2f1ed37482e545105d6dd"} err="failed to get container status \"f5a932dca5f2810a25a775136f518bba1a4e05d017c2f1ed37482e545105d6dd\": rpc error: code = NotFound desc = could not find container \"f5a932dca5f2810a25a775136f518bba1a4e05d017c2f1ed37482e545105d6dd\": container with ID starting with f5a932dca5f2810a25a775136f518bba1a4e05d017c2f1ed37482e545105d6dd not found: ID does not exist"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.831888 4721 scope.go:117] "RemoveContainer" containerID="823ead0e1dba1a6808b1545c985f36a5a06fbfff4dd41b9505ef04739ce6c35b"
Dec 02 12:11:23 crc kubenswrapper[4721]: E1202 12:11:23.832138 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"823ead0e1dba1a6808b1545c985f36a5a06fbfff4dd41b9505ef04739ce6c35b\": container with ID starting with 823ead0e1dba1a6808b1545c985f36a5a06fbfff4dd41b9505ef04739ce6c35b not found: ID does not exist" containerID="823ead0e1dba1a6808b1545c985f36a5a06fbfff4dd41b9505ef04739ce6c35b"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.832159 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"823ead0e1dba1a6808b1545c985f36a5a06fbfff4dd41b9505ef04739ce6c35b"} err="failed to get container status \"823ead0e1dba1a6808b1545c985f36a5a06fbfff4dd41b9505ef04739ce6c35b\": rpc error: code = NotFound desc = could not find container \"823ead0e1dba1a6808b1545c985f36a5a06fbfff4dd41b9505ef04739ce6c35b\": container with ID starting with 823ead0e1dba1a6808b1545c985f36a5a06fbfff4dd41b9505ef04739ce6c35b not found: ID does not exist"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.832173 4721 scope.go:117] "RemoveContainer" containerID="bca906d1cc3affbe17a28df28f0505ed399d602c1464a98f21b5efa1f72368a3"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.848112 4721 scope.go:117] "RemoveContainer" containerID="fdace05da094705c18217f06a7a51510f7e8b78478183bbe68593716361576c1"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.868709 4721 scope.go:117] "RemoveContainer" containerID="48d0a3ea165bcf7be03ce424410c59c646b11008f505aedca257771e05d47d7b"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.889273 4721 scope.go:117] "RemoveContainer" containerID="e204416ba36c46cda9a223da1214bf8edf260cb4e5c3045aa5a803e1a0724a6c"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.909863 4721 scope.go:117] "RemoveContainer" containerID="ce6811fa26efd6c19e05e8fbedc6f1bba94e07d1697b8654d725090db0856174"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.924913 4721 scope.go:117] "RemoveContainer" containerID="a8f531ad5245090f9458ffaf6b15767b2cf1e267e55587129ff8745980b7dc83"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.939081 4721 scope.go:117] "RemoveContainer" containerID="40af61249b0b7b921b5139f31ceff4851448a0fad0e7bd3973356446ae2b0851"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.953990 4721 scope.go:117] "RemoveContainer" containerID="821584e74f43e336348ba071bdabdfc3e5d8e843b62a6261110261e0b92582a8"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.970736 4721 scope.go:117] "RemoveContainer" containerID="e83dc368eb5822bf3f3d2476ee5ed27744fdcb5dcfd00abcf7a01c1b0ae2e44d"
Dec 02 12:11:23 crc kubenswrapper[4721]: I1202 12:11:23.983696 4721 scope.go:117] "RemoveContainer" containerID="c9ad5b423e11c9c124a474d325df92864a94a39e95c34bdf81c15770a2e3b3ff"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.001324 4721 scope.go:117] "RemoveContainer" containerID="088dff10742ea220c32ab9d82c56f0c66f001abfc1d5937f1374200d556f515c"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.023019 4721 scope.go:117] "RemoveContainer" containerID="f513e4ce4313743e980239aeb5eefca6c4b1e395a3c36451bf6d2161c7366c64"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.035805 4721 scope.go:117] "RemoveContainer" containerID="802178876a4950d3ce589b0214d46ed8a710139f5d544d3417bb944c86774e96"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.048457 4721 scope.go:117] "RemoveContainer" containerID="03c4f2324972362948f96ee55e110ab525ab6abfa434439a0e4df3e0a866ff71"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.071388 4721 scope.go:117] "RemoveContainer" containerID="995b2f3e6c864718f742047ed355ec10f9b204fdfff5a65859243e596dc1f883"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.085245 4721 scope.go:117] "RemoveContainer" containerID="bca906d1cc3affbe17a28df28f0505ed399d602c1464a98f21b5efa1f72368a3"
Dec 02 12:11:24 crc kubenswrapper[4721]: E1202 12:11:24.085735 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bca906d1cc3affbe17a28df28f0505ed399d602c1464a98f21b5efa1f72368a3\": container with ID starting with bca906d1cc3affbe17a28df28f0505ed399d602c1464a98f21b5efa1f72368a3 not found: ID does not exist" containerID="bca906d1cc3affbe17a28df28f0505ed399d602c1464a98f21b5efa1f72368a3"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.085785 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bca906d1cc3affbe17a28df28f0505ed399d602c1464a98f21b5efa1f72368a3"} err="failed to get container status \"bca906d1cc3affbe17a28df28f0505ed399d602c1464a98f21b5efa1f72368a3\": rpc error: code = NotFound desc = could not find container \"bca906d1cc3affbe17a28df28f0505ed399d602c1464a98f21b5efa1f72368a3\": container with ID starting with bca906d1cc3affbe17a28df28f0505ed399d602c1464a98f21b5efa1f72368a3 not found: ID does not exist"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.085818 4721 scope.go:117] "RemoveContainer" containerID="fdace05da094705c18217f06a7a51510f7e8b78478183bbe68593716361576c1"
Dec 02 12:11:24 crc kubenswrapper[4721]: E1202 12:11:24.086202 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdace05da094705c18217f06a7a51510f7e8b78478183bbe68593716361576c1\": container with ID starting with fdace05da094705c18217f06a7a51510f7e8b78478183bbe68593716361576c1 not found: ID does not exist" containerID="fdace05da094705c18217f06a7a51510f7e8b78478183bbe68593716361576c1"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.086233 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdace05da094705c18217f06a7a51510f7e8b78478183bbe68593716361576c1"} err="failed to get container status \"fdace05da094705c18217f06a7a51510f7e8b78478183bbe68593716361576c1\": rpc error: code = NotFound desc = could not find container \"fdace05da094705c18217f06a7a51510f7e8b78478183bbe68593716361576c1\": container with ID starting with fdace05da094705c18217f06a7a51510f7e8b78478183bbe68593716361576c1 not found: ID does not exist"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.086262 4721 scope.go:117] "RemoveContainer" containerID="48d0a3ea165bcf7be03ce424410c59c646b11008f505aedca257771e05d47d7b"
Dec 02 12:11:24 crc kubenswrapper[4721]: E1202 12:11:24.086689 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48d0a3ea165bcf7be03ce424410c59c646b11008f505aedca257771e05d47d7b\": container with ID starting with 48d0a3ea165bcf7be03ce424410c59c646b11008f505aedca257771e05d47d7b not found: ID does not exist" containerID="48d0a3ea165bcf7be03ce424410c59c646b11008f505aedca257771e05d47d7b"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.086735 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48d0a3ea165bcf7be03ce424410c59c646b11008f505aedca257771e05d47d7b"} err="failed to get container status \"48d0a3ea165bcf7be03ce424410c59c646b11008f505aedca257771e05d47d7b\": rpc error: code = NotFound desc = could not find container \"48d0a3ea165bcf7be03ce424410c59c646b11008f505aedca257771e05d47d7b\": container with ID starting with 48d0a3ea165bcf7be03ce424410c59c646b11008f505aedca257771e05d47d7b not found: ID does not exist"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.086769 4721 scope.go:117] "RemoveContainer" containerID="e204416ba36c46cda9a223da1214bf8edf260cb4e5c3045aa5a803e1a0724a6c"
Dec 02 12:11:24 crc kubenswrapper[4721]: E1202 12:11:24.087041 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e204416ba36c46cda9a223da1214bf8edf260cb4e5c3045aa5a803e1a0724a6c\": container with ID starting with e204416ba36c46cda9a223da1214bf8edf260cb4e5c3045aa5a803e1a0724a6c not found: ID does not exist" containerID="e204416ba36c46cda9a223da1214bf8edf260cb4e5c3045aa5a803e1a0724a6c"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.087064 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e204416ba36c46cda9a223da1214bf8edf260cb4e5c3045aa5a803e1a0724a6c"} err="failed to get container status \"e204416ba36c46cda9a223da1214bf8edf260cb4e5c3045aa5a803e1a0724a6c\": rpc error: code = NotFound desc = could not find container \"e204416ba36c46cda9a223da1214bf8edf260cb4e5c3045aa5a803e1a0724a6c\": container with ID starting with e204416ba36c46cda9a223da1214bf8edf260cb4e5c3045aa5a803e1a0724a6c not found: ID does not exist"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.087078 4721 scope.go:117] "RemoveContainer" containerID="ce6811fa26efd6c19e05e8fbedc6f1bba94e07d1697b8654d725090db0856174"
Dec 02 12:11:24 crc kubenswrapper[4721]: E1202 12:11:24.087305 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce6811fa26efd6c19e05e8fbedc6f1bba94e07d1697b8654d725090db0856174\": container with ID starting with ce6811fa26efd6c19e05e8fbedc6f1bba94e07d1697b8654d725090db0856174 not found: ID does not exist" containerID="ce6811fa26efd6c19e05e8fbedc6f1bba94e07d1697b8654d725090db0856174"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.087356 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce6811fa26efd6c19e05e8fbedc6f1bba94e07d1697b8654d725090db0856174"} err="failed to get container status \"ce6811fa26efd6c19e05e8fbedc6f1bba94e07d1697b8654d725090db0856174\": rpc error: code = NotFound desc = could not find container \"ce6811fa26efd6c19e05e8fbedc6f1bba94e07d1697b8654d725090db0856174\": container with ID starting with ce6811fa26efd6c19e05e8fbedc6f1bba94e07d1697b8654d725090db0856174 not found: ID does not exist"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.087376 4721 scope.go:117] "RemoveContainer" containerID="a8f531ad5245090f9458ffaf6b15767b2cf1e267e55587129ff8745980b7dc83"
Dec 02 12:11:24 crc kubenswrapper[4721]: E1202 12:11:24.087640 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8f531ad5245090f9458ffaf6b15767b2cf1e267e55587129ff8745980b7dc83\": container with ID starting with a8f531ad5245090f9458ffaf6b15767b2cf1e267e55587129ff8745980b7dc83 not found: ID does not exist" containerID="a8f531ad5245090f9458ffaf6b15767b2cf1e267e55587129ff8745980b7dc83"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.087664 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8f531ad5245090f9458ffaf6b15767b2cf1e267e55587129ff8745980b7dc83"} err="failed to get container status \"a8f531ad5245090f9458ffaf6b15767b2cf1e267e55587129ff8745980b7dc83\": rpc error: code = NotFound desc = could not find container \"a8f531ad5245090f9458ffaf6b15767b2cf1e267e55587129ff8745980b7dc83\": container with ID starting with a8f531ad5245090f9458ffaf6b15767b2cf1e267e55587129ff8745980b7dc83 not found: ID does not exist"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.851157 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" path="/var/lib/kubelet/pods/09e554d6-e3a5-437e-a153-3ed91fadb78d/volumes"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.855231 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" path="/var/lib/kubelet/pods/1cff80c4-dc01-4e68-9cb7-ae41a5f05973/volumes"
Dec 02 12:11:24 crc kubenswrapper[4721]: I1202 12:11:24.856949 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27eb4674-477b-4189-857a-d5a572e89277" path="/var/lib/kubelet/pods/27eb4674-477b-4189-857a-d5a572e89277/volumes"
Dec 02 12:11:25 crc kubenswrapper[4721]: I1202 12:11:25.703585 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 12:11:25 crc kubenswrapper[4721]: I1202 12:11:25.703673 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.453504 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454255 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-auditor"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454277 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-auditor"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454302 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-replicator"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454315 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-replicator"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454330 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-replicator"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454443 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-replicator"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454465 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-auditor"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454477 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-auditor"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454500 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-updater"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454514 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-updater"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454531 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="account-server"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454544 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="account-server"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454566 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-server"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454578 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-server"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454596 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-auditor"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454608 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-auditor"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454628 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-replicator"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454640 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-replicator"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454664 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-auditor"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454675 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-auditor"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454696 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="swift-recon-cron"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454708 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="swift-recon-cron"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454729 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-updater"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454740 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-updater"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454756 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-server"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454768 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-server"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454788 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-server"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454800 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-server"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454817 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-auditor"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454828 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-auditor"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454847 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-replicator"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454860 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-replicator"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454873 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="swift-recon-cron"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454884 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="swift-recon-cron"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454898 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="swift-recon-cron"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454909 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="swift-recon-cron"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454929 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-server"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454940 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-server"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454955 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-replicator"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454968 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-replicator"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.454983 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="rsync"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.454995 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="rsync"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455007 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1498f09-7529-4b3e-b2b8-a11b5a5a4624" containerName="proxy-server"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455019 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1498f09-7529-4b3e-b2b8-a11b5a5a4624" containerName="proxy-server"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455042 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-updater"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455054 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-updater"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455069 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-expirer"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455081 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-expirer"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455100 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44cd03cd-76ac-42f0-925b-3cea33940cb2" containerName="swift-ring-rebalance"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455111 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="44cd03cd-76ac-42f0-925b-3cea33940cb2" containerName="swift-ring-rebalance"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455124 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-auditor"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455136 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-auditor"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455156 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-replicator"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455169 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-replicator"
Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455184 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="account-replicator"
Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455196 4721 state_mem.go:107] "Deleted CPUSet assignment"
podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="account-replicator" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455219 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1498f09-7529-4b3e-b2b8-a11b5a5a4624" containerName="proxy-httpd" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455231 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1498f09-7529-4b3e-b2b8-a11b5a5a4624" containerName="proxy-httpd" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455245 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-server" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455257 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-server" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455270 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-expirer" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455282 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-expirer" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455302 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="rsync" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455314 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="rsync" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455335 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-auditor" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455372 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-auditor" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455393 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-replicator" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455404 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-replicator" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455424 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-expirer" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455437 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-expirer" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455453 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-updater" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455465 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-updater" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455486 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-reaper" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455498 4721 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-reaper" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455520 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-server" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455532 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-server" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455547 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-updater" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455560 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-updater" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455577 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="account-auditor" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455589 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="account-auditor" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455602 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-reaper" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455613 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-reaper" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455627 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="account-reaper" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455640 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="account-reaper" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455656 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-replicator" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455668 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-replicator" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455684 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="rsync" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455696 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="rsync" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455715 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-updater" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455727 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-updater" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455741 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-auditor" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455753 4721 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-auditor" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455771 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-server" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455783 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-server" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.455798 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-server" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.455810 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-server" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456078 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-auditor" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456108 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-replicator" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456124 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-server" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456138 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="account-server" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456154 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-auditor" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456170 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-server" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456192 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-auditor" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456213 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-replicator" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456230 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1498f09-7529-4b3e-b2b8-a11b5a5a4624" containerName="proxy-httpd" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456249 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-expirer" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456263 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="account-auditor" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456275 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="account-reaper" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456291 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-server" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 
12:11:26.456310 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-server" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456329 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-server" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456371 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="account-replicator" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456384 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-updater" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456397 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-auditor" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456416 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="rsync" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456435 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-server" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456451 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-server" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456469 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-auditor" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456490 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1498f09-7529-4b3e-b2b8-a11b5a5a4624" containerName="proxy-server" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456507 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-auditor" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456523 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-replicator" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456539 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="swift-recon-cron" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456554 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-replicator" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456571 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="rsync" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456585 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-auditor" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456598 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="container-auditor" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456613 4721 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-replicator" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456627 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-replicator" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456643 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="account-reaper" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456655 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-updater" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456669 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="44cd03cd-76ac-42f0-925b-3cea33940cb2" containerName="swift-ring-rebalance" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456687 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="swift-recon-cron" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456732 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-replicator" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456752 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-reaper" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456765 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-replicator" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456783 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="swift-recon-cron" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456801 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="rsync" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456817 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="account-server" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456833 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cff80c4-dc01-4e68-9cb7-ae41a5f05973" containerName="object-updater" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456845 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-expirer" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456865 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="container-updater" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456883 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="object-updater" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456899 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="27eb4674-477b-4189-857a-d5a572e89277" containerName="container-updater" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.456912 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e554d6-e3a5-437e-a153-3ed91fadb78d" containerName="object-expirer" Dec 02 12:11:26 crc 
kubenswrapper[4721]: I1202 12:11:26.464764 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.467002 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.468620 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-files" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.468746 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-conf" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.468891 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-storage-config-data" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.469160 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-swift-dockercfg-qw5k6" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.586272 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/abd15e9e-08cc-451c-903a-09250321067c-cache\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.586433 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.586458 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkcks\" (UniqueName: \"kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-kube-api-access-wkcks\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.586486 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/abd15e9e-08cc-451c-903a-09250321067c-lock\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.586549 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.688094 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.688130 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkcks\" (UniqueName: \"kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-kube-api-access-wkcks\") pod 
\"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.688154 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/abd15e9e-08cc-451c-903a-09250321067c-lock\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.688195 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.688258 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/abd15e9e-08cc-451c-903a-09250321067c-cache\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.688291 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.688314 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:11:26 crc kubenswrapper[4721]: E1202 12:11:26.688380 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift podName:abd15e9e-08cc-451c-903a-09250321067c nodeName:}" failed. No retries permitted until 2025-12-02 12:11:27.18835966 +0000 UTC m=+1216.926910009 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift") pod "swift-storage-0" (UID: "abd15e9e-08cc-451c-903a-09250321067c") : configmap "swift-ring-files" not found Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.688682 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/abd15e9e-08cc-451c-903a-09250321067c-lock\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.688772 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/abd15e9e-08cc-451c-903a-09250321067c-cache\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.688786 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") device mount path \"/mnt/openstack/pv11\"" pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.707550 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:26 crc kubenswrapper[4721]: I1202 12:11:26.720276 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkcks\" (UniqueName: \"kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-kube-api-access-wkcks\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:27 crc kubenswrapper[4721]: I1202 12:11:27.196209 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:27 crc kubenswrapper[4721]: E1202 12:11:27.196556 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:11:27 crc kubenswrapper[4721]: E1202 12:11:27.196587 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:11:27 crc kubenswrapper[4721]: E1202 12:11:27.196656 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift podName:abd15e9e-08cc-451c-903a-09250321067c nodeName:}" failed. No retries permitted until 2025-12-02 12:11:28.196632583 +0000 UTC m=+1217.935182962 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift") pod "swift-storage-0" (UID: "abd15e9e-08cc-451c-903a-09250321067c") : configmap "swift-ring-files" not found Dec 02 12:11:28 crc kubenswrapper[4721]: I1202 12:11:28.211628 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:28 crc kubenswrapper[4721]: E1202 12:11:28.211909 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:11:28 crc kubenswrapper[4721]: E1202 12:11:28.212133 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:11:28 crc kubenswrapper[4721]: E1202 12:11:28.212183 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift podName:abd15e9e-08cc-451c-903a-09250321067c nodeName:}" failed. No retries permitted until 2025-12-02 12:11:30.212166197 +0000 UTC m=+1219.950716556 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift") pod "swift-storage-0" (UID: "abd15e9e-08cc-451c-903a-09250321067c") : configmap "swift-ring-files" not found Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.240404 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:11:30 crc kubenswrapper[4721]: E1202 12:11:30.240574 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:11:30 crc kubenswrapper[4721]: E1202 12:11:30.240599 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:11:30 crc kubenswrapper[4721]: E1202 12:11:30.240658 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift podName:abd15e9e-08cc-451c-903a-09250321067c nodeName:}" failed. No retries permitted until 2025-12-02 12:11:34.240641545 +0000 UTC m=+1223.979191894 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift") pod "swift-storage-0" (UID: "abd15e9e-08cc-451c-903a-09250321067c") : configmap "swift-ring-files" not found Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.329588 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-dv65n"] Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.330768 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.332973 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-proxy-config-data" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.333083 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.333149 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.356287 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-dv65n"] Dec 02 12:11:30 crc kubenswrapper[4721]: E1202 12:11:30.356984 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[dispersionconf etc-swift kube-api-access-g6bxq ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" podUID="0cbee2dc-1b89-4350-bc63-0702f498b8fe" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.367543 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-dqs75"] Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.368684 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.387573 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-dv65n"] Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.397894 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-dqs75"] Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.444453 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6bxq\" (UniqueName: \"kubernetes.io/projected/0cbee2dc-1b89-4350-bc63-0702f498b8fe-kube-api-access-g6bxq\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.444499 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0cbee2dc-1b89-4350-bc63-0702f498b8fe-swiftconf\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.444628 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0cbee2dc-1b89-4350-bc63-0702f498b8fe-ring-data-devices\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.444646 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0cbee2dc-1b89-4350-bc63-0702f498b8fe-scripts\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 
12:11:30.444662 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0cbee2dc-1b89-4350-bc63-0702f498b8fe-dispersionconf\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.444755 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0cbee2dc-1b89-4350-bc63-0702f498b8fe-etc-swift\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.545993 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmtxr\" (UniqueName: \"kubernetes.io/projected/fc0b7803-607e-4bc1-8268-a919c487ae58-kube-api-access-mmtxr\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.546055 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/fc0b7803-607e-4bc1-8268-a919c487ae58-etc-swift\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.546087 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0cbee2dc-1b89-4350-bc63-0702f498b8fe-ring-data-devices\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.546107 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0cbee2dc-1b89-4350-bc63-0702f498b8fe-scripts\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.546124 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0cbee2dc-1b89-4350-bc63-0702f498b8fe-dispersionconf\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.546158 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0cbee2dc-1b89-4350-bc63-0702f498b8fe-etc-swift\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.546197 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/fc0b7803-607e-4bc1-8268-a919c487ae58-ring-data-devices\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " 
pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.546223 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc0b7803-607e-4bc1-8268-a919c487ae58-scripts\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.546250 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/fc0b7803-607e-4bc1-8268-a919c487ae58-dispersionconf\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.546277 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6bxq\" (UniqueName: \"kubernetes.io/projected/0cbee2dc-1b89-4350-bc63-0702f498b8fe-kube-api-access-g6bxq\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.546298 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/fc0b7803-607e-4bc1-8268-a919c487ae58-swiftconf\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.546316 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0cbee2dc-1b89-4350-bc63-0702f498b8fe-swiftconf\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.546829 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0cbee2dc-1b89-4350-bc63-0702f498b8fe-ring-data-devices\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.547210 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0cbee2dc-1b89-4350-bc63-0702f498b8fe-etc-swift\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.547296 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0cbee2dc-1b89-4350-bc63-0702f498b8fe-scripts\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.551255 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0cbee2dc-1b89-4350-bc63-0702f498b8fe-swiftconf\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" 
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.554788 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0cbee2dc-1b89-4350-bc63-0702f498b8fe-dispersionconf\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.564118 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6bxq\" (UniqueName: \"kubernetes.io/projected/0cbee2dc-1b89-4350-bc63-0702f498b8fe-kube-api-access-g6bxq\") pod \"swift-ring-rebalance-dv65n\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") " pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.619553 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.627907 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-dv65n" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.647313 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc0b7803-607e-4bc1-8268-a919c487ae58-scripts\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.647383 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/fc0b7803-607e-4bc1-8268-a919c487ae58-dispersionconf\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.647409 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/fc0b7803-607e-4bc1-8268-a919c487ae58-swiftconf\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.647446 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmtxr\" (UniqueName: \"kubernetes.io/projected/fc0b7803-607e-4bc1-8268-a919c487ae58-kube-api-access-mmtxr\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.647469 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/fc0b7803-607e-4bc1-8268-a919c487ae58-etc-swift\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.647526 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/fc0b7803-607e-4bc1-8268-a919c487ae58-ring-data-devices\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 
12:11:30.648119 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc0b7803-607e-4bc1-8268-a919c487ae58-scripts\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.648138 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/fc0b7803-607e-4bc1-8268-a919c487ae58-etc-swift\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.648183 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/fc0b7803-607e-4bc1-8268-a919c487ae58-ring-data-devices\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.651223 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/fc0b7803-607e-4bc1-8268-a919c487ae58-dispersionconf\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.654786 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/fc0b7803-607e-4bc1-8268-a919c487ae58-swiftconf\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.665548 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmtxr\" (UniqueName: \"kubernetes.io/projected/fc0b7803-607e-4bc1-8268-a919c487ae58-kube-api-access-mmtxr\") pod \"swift-ring-rebalance-dqs75\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") " pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.685548 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-dqs75"
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.748440 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6bxq\" (UniqueName: \"kubernetes.io/projected/0cbee2dc-1b89-4350-bc63-0702f498b8fe-kube-api-access-g6bxq\") pod \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") "
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.748550 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0cbee2dc-1b89-4350-bc63-0702f498b8fe-swiftconf\") pod \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") "
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.748575 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0cbee2dc-1b89-4350-bc63-0702f498b8fe-scripts\") pod \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") "
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.748611 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0cbee2dc-1b89-4350-bc63-0702f498b8fe-dispersionconf\") pod \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") "
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.748638 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0cbee2dc-1b89-4350-bc63-0702f498b8fe-ring-data-devices\") pod \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") "
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.748717 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0cbee2dc-1b89-4350-bc63-0702f498b8fe-etc-swift\") pod \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\" (UID: \"0cbee2dc-1b89-4350-bc63-0702f498b8fe\") "
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.749206 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cbee2dc-1b89-4350-bc63-0702f498b8fe-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "0cbee2dc-1b89-4350-bc63-0702f498b8fe" (UID: "0cbee2dc-1b89-4350-bc63-0702f498b8fe"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.749279 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cbee2dc-1b89-4350-bc63-0702f498b8fe-scripts" (OuterVolumeSpecName: "scripts") pod "0cbee2dc-1b89-4350-bc63-0702f498b8fe" (UID: "0cbee2dc-1b89-4350-bc63-0702f498b8fe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.749467 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cbee2dc-1b89-4350-bc63-0702f498b8fe-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "0cbee2dc-1b89-4350-bc63-0702f498b8fe" (UID: "0cbee2dc-1b89-4350-bc63-0702f498b8fe"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.751886 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cbee2dc-1b89-4350-bc63-0702f498b8fe-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "0cbee2dc-1b89-4350-bc63-0702f498b8fe" (UID: "0cbee2dc-1b89-4350-bc63-0702f498b8fe"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.754516 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cbee2dc-1b89-4350-bc63-0702f498b8fe-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "0cbee2dc-1b89-4350-bc63-0702f498b8fe" (UID: "0cbee2dc-1b89-4350-bc63-0702f498b8fe"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.755406 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cbee2dc-1b89-4350-bc63-0702f498b8fe-kube-api-access-g6bxq" (OuterVolumeSpecName: "kube-api-access-g6bxq") pod "0cbee2dc-1b89-4350-bc63-0702f498b8fe" (UID: "0cbee2dc-1b89-4350-bc63-0702f498b8fe"). InnerVolumeSpecName "kube-api-access-g6bxq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.850302 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0cbee2dc-1b89-4350-bc63-0702f498b8fe-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.850359 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0cbee2dc-1b89-4350-bc63-0702f498b8fe-swiftconf\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.850371 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0cbee2dc-1b89-4350-bc63-0702f498b8fe-dispersionconf\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.850384 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0cbee2dc-1b89-4350-bc63-0702f498b8fe-ring-data-devices\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.850398 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0cbee2dc-1b89-4350-bc63-0702f498b8fe-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:30 crc kubenswrapper[4721]: I1202 12:11:30.850409 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6bxq\" (UniqueName: \"kubernetes.io/projected/0cbee2dc-1b89-4350-bc63-0702f498b8fe-kube-api-access-g6bxq\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:31 crc kubenswrapper[4721]: I1202 12:11:31.150966 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-dqs75"]
Dec 02 12:11:31 crc kubenswrapper[4721]: I1202 12:11:31.627483 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-dv65n"
Dec 02 12:11:31 crc kubenswrapper[4721]: I1202 12:11:31.627494 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" event={"ID":"fc0b7803-607e-4bc1-8268-a919c487ae58","Type":"ContainerStarted","Data":"01b24f21030f43edfac8cb49a1c0d04e6cbccb0a6f2dbd44d56ea1f64f4cffed"}
Dec 02 12:11:31 crc kubenswrapper[4721]: I1202 12:11:31.627999 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" event={"ID":"fc0b7803-607e-4bc1-8268-a919c487ae58","Type":"ContainerStarted","Data":"9cef47b9827699ae15376730619a42f66cdf67cf5d39bcb2add288e6221ebcf4"}
Dec 02 12:11:31 crc kubenswrapper[4721]: I1202 12:11:31.671643 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" podStartSLOduration=1.671619583 podStartE2EDuration="1.671619583s" podCreationTimestamp="2025-12-02 12:11:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:11:31.645550481 +0000 UTC m=+1221.384100850" watchObservedRunningTime="2025-12-02 12:11:31.671619583 +0000 UTC m=+1221.410169952"
Dec 02 12:11:31 crc kubenswrapper[4721]: I1202 12:11:31.697313 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-dv65n"]
Dec 02 12:11:31 crc kubenswrapper[4721]: I1202 12:11:31.704210 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-dv65n"]
Dec 02 12:11:32 crc kubenswrapper[4721]: I1202 12:11:32.850363 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cbee2dc-1b89-4350-bc63-0702f498b8fe" path="/var/lib/kubelet/pods/0cbee2dc-1b89-4350-bc63-0702f498b8fe/volumes"
Dec 02 12:11:34 crc kubenswrapper[4721]: I1202 12:11:34.304196 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:11:34 crc kubenswrapper[4721]: E1202 12:11:34.304435 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Dec 02 12:11:34 crc kubenswrapper[4721]: E1202 12:11:34.304624 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found
Dec 02 12:11:34 crc kubenswrapper[4721]: E1202 12:11:34.304685 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift podName:abd15e9e-08cc-451c-903a-09250321067c nodeName:}" failed. No retries permitted until 2025-12-02 12:11:42.304665047 +0000 UTC m=+1232.043215396 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift") pod "swift-storage-0" (UID: "abd15e9e-08cc-451c-903a-09250321067c") : configmap "swift-ring-files" not found
Dec 02 12:11:37 crc kubenswrapper[4721]: I1202 12:11:37.675690 4721 generic.go:334] "Generic (PLEG): container finished" podID="fc0b7803-607e-4bc1-8268-a919c487ae58" containerID="01b24f21030f43edfac8cb49a1c0d04e6cbccb0a6f2dbd44d56ea1f64f4cffed" exitCode=0
Dec 02 12:11:37 crc kubenswrapper[4721]: I1202 12:11:37.675765 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" event={"ID":"fc0b7803-607e-4bc1-8268-a919c487ae58","Type":"ContainerDied","Data":"01b24f21030f43edfac8cb49a1c0d04e6cbccb0a6f2dbd44d56ea1f64f4cffed"}
Dec 02 12:11:38 crc kubenswrapper[4721]: I1202 12:11:38.950602 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-dqs75"
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.073274 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/fc0b7803-607e-4bc1-8268-a919c487ae58-swiftconf\") pod \"fc0b7803-607e-4bc1-8268-a919c487ae58\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") "
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.073711 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmtxr\" (UniqueName: \"kubernetes.io/projected/fc0b7803-607e-4bc1-8268-a919c487ae58-kube-api-access-mmtxr\") pod \"fc0b7803-607e-4bc1-8268-a919c487ae58\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") "
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.073752 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc0b7803-607e-4bc1-8268-a919c487ae58-scripts\") pod \"fc0b7803-607e-4bc1-8268-a919c487ae58\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") "
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.073789 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/fc0b7803-607e-4bc1-8268-a919c487ae58-etc-swift\") pod \"fc0b7803-607e-4bc1-8268-a919c487ae58\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") "
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.073848 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/fc0b7803-607e-4bc1-8268-a919c487ae58-ring-data-devices\") pod \"fc0b7803-607e-4bc1-8268-a919c487ae58\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") "
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.073882 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/fc0b7803-607e-4bc1-8268-a919c487ae58-dispersionconf\") pod \"fc0b7803-607e-4bc1-8268-a919c487ae58\" (UID: \"fc0b7803-607e-4bc1-8268-a919c487ae58\") "
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.074426 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc0b7803-607e-4bc1-8268-a919c487ae58-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "fc0b7803-607e-4bc1-8268-a919c487ae58" (UID: "fc0b7803-607e-4bc1-8268-a919c487ae58"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.074674 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc0b7803-607e-4bc1-8268-a919c487ae58-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "fc0b7803-607e-4bc1-8268-a919c487ae58" (UID: "fc0b7803-607e-4bc1-8268-a919c487ae58"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.080083 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc0b7803-607e-4bc1-8268-a919c487ae58-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "fc0b7803-607e-4bc1-8268-a919c487ae58" (UID: "fc0b7803-607e-4bc1-8268-a919c487ae58"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.088456 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc0b7803-607e-4bc1-8268-a919c487ae58-kube-api-access-mmtxr" (OuterVolumeSpecName: "kube-api-access-mmtxr") pod "fc0b7803-607e-4bc1-8268-a919c487ae58" (UID: "fc0b7803-607e-4bc1-8268-a919c487ae58"). InnerVolumeSpecName "kube-api-access-mmtxr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.090485 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc0b7803-607e-4bc1-8268-a919c487ae58-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "fc0b7803-607e-4bc1-8268-a919c487ae58" (UID: "fc0b7803-607e-4bc1-8268-a919c487ae58"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.096792 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc0b7803-607e-4bc1-8268-a919c487ae58-scripts" (OuterVolumeSpecName: "scripts") pod "fc0b7803-607e-4bc1-8268-a919c487ae58" (UID: "fc0b7803-607e-4bc1-8268-a919c487ae58"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.174935 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/fc0b7803-607e-4bc1-8268-a919c487ae58-swiftconf\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.175169 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmtxr\" (UniqueName: \"kubernetes.io/projected/fc0b7803-607e-4bc1-8268-a919c487ae58-kube-api-access-mmtxr\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.175236 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc0b7803-607e-4bc1-8268-a919c487ae58-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.175304 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/fc0b7803-607e-4bc1-8268-a919c487ae58-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.175381 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/fc0b7803-607e-4bc1-8268-a919c487ae58-dispersionconf\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.175456 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/fc0b7803-607e-4bc1-8268-a919c487ae58-ring-data-devices\") on node \"crc\" DevicePath \"\""
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.693485 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-dqs75" event={"ID":"fc0b7803-607e-4bc1-8268-a919c487ae58","Type":"ContainerDied","Data":"9cef47b9827699ae15376730619a42f66cdf67cf5d39bcb2add288e6221ebcf4"}
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.694131 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9cef47b9827699ae15376730619a42f66cdf67cf5d39bcb2add288e6221ebcf4"
Dec 02 12:11:39 crc kubenswrapper[4721]: I1202 12:11:39.693597 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-dqs75"
Dec 02 12:11:42 crc kubenswrapper[4721]: I1202 12:11:42.336540 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:11:42 crc kubenswrapper[4721]: I1202 12:11:42.344245 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift\") pod \"swift-storage-0\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:11:42 crc kubenswrapper[4721]: I1202 12:11:42.396793 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:11:42 crc kubenswrapper[4721]: I1202 12:11:42.837192 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Dec 02 12:11:43 crc kubenswrapper[4721]: I1202 12:11:43.740194 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"ed051aea4154d9418fb64a82e565ae3d3d7984f5b5598da57baf244fe16f0e2d"}
Dec 02 12:11:43 crc kubenswrapper[4721]: I1202 12:11:43.740769 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"b0e5efdedbd1f5fabe8998107f2587abe4734f6390787cc1e4e744765e6b4416"}
Dec 02 12:11:43 crc kubenswrapper[4721]: I1202 12:11:43.740780 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"1c735c7457a2fd9bcc93e8d51ec6e1196f1e87213b7ceb3888d75ea35c500809"}
Dec 02 12:11:43 crc kubenswrapper[4721]: I1202 12:11:43.740788 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"33a93293a5e6d46fbaa37920212e7c74478e7df95940b9b1755ff8f9857cf75b"}
Dec 02 12:11:43 crc kubenswrapper[4721]: I1202 12:11:43.740796 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"450139c98f8a059d63d01f895ec458a14cf2dd6bd7a8413db68f33900cd39268"}
Dec 02 12:11:43 crc kubenswrapper[4721]: I1202 12:11:43.740806 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"5bb187cdc2fa33ef6bcbc8229c85e0e3024c7a60613a8708957d4b86b8be6e95"}
Dec 02 12:11:43 crc kubenswrapper[4721]: I1202 12:11:43.740816 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"954f9eef21487dab0c53d6de6385170b145616ea67d1fdaff049ddafe769689f"}
Dec 02 12:11:44 crc kubenswrapper[4721]: I1202 12:11:44.752202 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"628e2ea957b2e937b293ba93b734911e730d56d108809fe05e1688b2f9353e2b"}
Dec 02 12:11:44 crc kubenswrapper[4721]: I1202 12:11:44.752252 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"97e6d994028248d3b24bc8277f9289824a5910b07e7de150598a88504cd769b7"}
Dec 02 12:11:44 crc kubenswrapper[4721]: I1202 12:11:44.752264 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"4186a237272b72b6155c9ef4e9a70b2c12aa6d1fb15164ae0894bce037795924"}
Dec 02 12:11:44 crc kubenswrapper[4721]: I1202 12:11:44.752273 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"771040ed153d1ea3dbd1bc88ba9f26ddd441e46bbff0089ee6cac94e90bac57c"}
Dec 02 12:11:44 crc kubenswrapper[4721]: I1202 12:11:44.752284 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"3a6693d4d449985ba1e91f61dc8c74bacb7d85efe40dc0fcdf44947f33ef8eb0"}
Dec 02 12:11:44 crc kubenswrapper[4721]: I1202 12:11:44.752294 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"ef989123761d323f09877e23df927ef8328626d67fc841d36df3b4c2fe983f1e"}
Dec 02 12:11:44 crc kubenswrapper[4721]: I1202 12:11:44.752304 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"127d17d32a0876ca28fb0aa7c7bf329e19d6ba3dd60035a33df7a754b979fbc4"}
Dec 02 12:11:44 crc kubenswrapper[4721]: I1202 12:11:44.752313 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"c1ac0005a2eec5bb71049886a8888df3b036491c3bb2c173a455fa754615bd56"}
Dec 02 12:11:45 crc kubenswrapper[4721]: I1202 12:11:45.764412 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"0dbb6e125a022d781ba4771a7e6e8da34453bcadf734863dadb35308bc8ed993"}
Dec 02 12:11:45 crc kubenswrapper[4721]: I1202 12:11:45.764760 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerStarted","Data":"029bae74661c008a7bd2495205c5b40e47b7ed126d66e720bd751eca278f8a64"}
Dec 02 12:11:45 crc kubenswrapper[4721]: I1202 12:11:45.798877 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-0" podStartSLOduration=20.798857809 podStartE2EDuration="20.798857809s" podCreationTimestamp="2025-12-02 12:11:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:11:45.796832547 +0000 UTC m=+1235.535382936" watchObservedRunningTime="2025-12-02 12:11:45.798857809 +0000 UTC m=+1235.537408158"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.528886 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"]
Dec 02 12:11:55 crc kubenswrapper[4721]: E1202 12:11:55.530014 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc0b7803-607e-4bc1-8268-a919c487ae58" containerName="swift-ring-rebalance"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.530038 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc0b7803-607e-4bc1-8268-a919c487ae58" containerName="swift-ring-rebalance"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.530274 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc0b7803-607e-4bc1-8268-a919c487ae58" containerName="swift-ring-rebalance"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.531465 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.536765 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-proxy-config-data"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.539286 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"]
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.614468 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e8b5dbed-98a7-408e-89fa-9de1000db0fd-etc-swift\") pod \"swift-proxy-7fff86655-lrpjm\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.614529 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25gj8\" (UniqueName: \"kubernetes.io/projected/e8b5dbed-98a7-408e-89fa-9de1000db0fd-kube-api-access-25gj8\") pod \"swift-proxy-7fff86655-lrpjm\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.614571 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8b5dbed-98a7-408e-89fa-9de1000db0fd-log-httpd\") pod \"swift-proxy-7fff86655-lrpjm\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.614790 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8b5dbed-98a7-408e-89fa-9de1000db0fd-run-httpd\") pod \"swift-proxy-7fff86655-lrpjm\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.614922 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8b5dbed-98a7-408e-89fa-9de1000db0fd-config-data\") pod \"swift-proxy-7fff86655-lrpjm\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.702572 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.702649 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.702716 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.703652 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"53f7071ac35634b832b9710cc1dcc9432696ff76188da7c6520c40b950115114"} pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.703831 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" containerID="cri-o://53f7071ac35634b832b9710cc1dcc9432696ff76188da7c6520c40b950115114" gracePeriod=600
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.716281 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e8b5dbed-98a7-408e-89fa-9de1000db0fd-etc-swift\") pod \"swift-proxy-7fff86655-lrpjm\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.716378 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25gj8\" (UniqueName: \"kubernetes.io/projected/e8b5dbed-98a7-408e-89fa-9de1000db0fd-kube-api-access-25gj8\") pod \"swift-proxy-7fff86655-lrpjm\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.716429 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8b5dbed-98a7-408e-89fa-9de1000db0fd-log-httpd\") pod \"swift-proxy-7fff86655-lrpjm\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.716469 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8b5dbed-98a7-408e-89fa-9de1000db0fd-run-httpd\") pod \"swift-proxy-7fff86655-lrpjm\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.716511 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8b5dbed-98a7-408e-89fa-9de1000db0fd-config-data\") pod \"swift-proxy-7fff86655-lrpjm\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.717104 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8b5dbed-98a7-408e-89fa-9de1000db0fd-log-httpd\") pod \"swift-proxy-7fff86655-lrpjm\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.717254 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8b5dbed-98a7-408e-89fa-9de1000db0fd-run-httpd\") pod \"swift-proxy-7fff86655-lrpjm\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.723031 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e8b5dbed-98a7-408e-89fa-9de1000db0fd-etc-swift\") pod \"swift-proxy-7fff86655-lrpjm\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.725896 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8b5dbed-98a7-408e-89fa-9de1000db0fd-config-data\") pod \"swift-proxy-7fff86655-lrpjm\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.740847 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25gj8\" (UniqueName: \"kubernetes.io/projected/e8b5dbed-98a7-408e-89fa-9de1000db0fd-kube-api-access-25gj8\") pod \"swift-proxy-7fff86655-lrpjm\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.856240 4721 generic.go:334] "Generic (PLEG): container finished" podID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerID="53f7071ac35634b832b9710cc1dcc9432696ff76188da7c6520c40b950115114" exitCode=0
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.856292 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerDied","Data":"53f7071ac35634b832b9710cc1dcc9432696ff76188da7c6520c40b950115114"}
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.856328 4721 scope.go:117] "RemoveContainer" containerID="454276a48c0c3533be2304833299320cbec4257417769197c8e6697daf1879ef"
Dec 02 12:11:55 crc kubenswrapper[4721]: I1202 12:11:55.891795 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:56 crc kubenswrapper[4721]: I1202 12:11:56.143863 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"]
Dec 02 12:11:56 crc kubenswrapper[4721]: I1202 12:11:56.865636 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm" event={"ID":"e8b5dbed-98a7-408e-89fa-9de1000db0fd","Type":"ContainerStarted","Data":"bad63ff065a83b78f743a6160204f04992feeb7ad286aa4f36d47b09d53c915a"}
Dec 02 12:11:56 crc kubenswrapper[4721]: I1202 12:11:56.865969 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm" event={"ID":"e8b5dbed-98a7-408e-89fa-9de1000db0fd","Type":"ContainerStarted","Data":"8286eb8f0da0edabb525ade0a77e7ba7c03541194af514ffa3d0c93f9414cc80"}
Dec 02 12:11:56 crc kubenswrapper[4721]: I1202 12:11:56.865986 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:56 crc kubenswrapper[4721]: I1202 12:11:56.865998 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:11:56 crc kubenswrapper[4721]: I1202 12:11:56.866005 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm" event={"ID":"e8b5dbed-98a7-408e-89fa-9de1000db0fd","Type":"ContainerStarted","Data":"331f9e4cb4f2d799cc69e9a91caaebdbcc0fb943d618ebe65895aa81661b8514"}
Dec 02 12:11:56 crc kubenswrapper[4721]: I1202 12:11:56.868314 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerStarted","Data":"e66eb73009f2b3bd9c935e4952f8884d1696ff381f4f38bc986a2a57bb5b7ce6"}
Dec 02 12:11:56 crc kubenswrapper[4721]: I1202 12:11:56.885007 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm" podStartSLOduration=1.8849917870000001 podStartE2EDuration="1.884991787s" podCreationTimestamp="2025-12-02 12:11:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:11:56.881443349 +0000 UTC m=+1246.619993698" watchObservedRunningTime="2025-12-02 12:11:56.884991787 +0000 UTC m=+1246.623542136"
Dec 02 12:12:05 crc kubenswrapper[4721]: I1202 12:12:05.894504 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:12:05 crc kubenswrapper[4721]: I1202 12:12:05.895120 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"
Dec 02 12:12:07 crc kubenswrapper[4721]: I1202 12:12:07.858387 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"]
Dec 02 12:12:07 crc kubenswrapper[4721]: I1202 12:12:07.862510 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:07 crc kubenswrapper[4721]: I1202 12:12:07.865733 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"]
Dec 02 12:12:07 crc kubenswrapper[4721]: I1202 12:12:07.866636 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data"
Dec 02 12:12:07 crc kubenswrapper[4721]: I1202 12:12:07.868812 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts"
Dec 02 12:12:07 crc kubenswrapper[4721]: I1202 12:12:07.990731 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-dispersionconf\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:07 crc kubenswrapper[4721]: I1202 12:12:07.990792 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-etc-swift\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:07 crc kubenswrapper[4721]: I1202 12:12:07.990824 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-ring-data-devices\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:07 crc kubenswrapper[4721]: I1202 12:12:07.991016 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-scripts\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:07 crc kubenswrapper[4721]: I1202 12:12:07.991060 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-swiftconf\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:07 crc kubenswrapper[4721]: I1202 12:12:07.991151 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wn62\" (UniqueName: \"kubernetes.io/projected/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-kube-api-access-5wn62\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.092848 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-ring-data-devices\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.092950 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-scripts\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.092977 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-swiftconf\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.093042 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wn62\" (UniqueName: \"kubernetes.io/projected/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-kube-api-access-5wn62\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.093118 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-dispersionconf\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.093162 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-etc-swift\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.094331 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-ring-data-devices\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.094444 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-scripts\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.094714 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-etc-swift\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.098727 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-dispersionconf\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.098909 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-swiftconf\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.119870 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wn62\" (UniqueName: \"kubernetes.io/projected/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-kube-api-access-5wn62\") pod \"swift-ring-rebalance-debug-st7t2\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.201683 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.508388 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"]
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.986827 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2" event={"ID":"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba","Type":"ContainerStarted","Data":"79ef1f85e484641138ea3f722a82c788cf177f4921a5dd9dd1248bf834190ce6"}
Dec 02 12:12:08 crc kubenswrapper[4721]: I1202 12:12:08.986875 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2" event={"ID":"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba","Type":"ContainerStarted","Data":"0ef1c76e1dac00c5a8ac0c59a198604dcbb9f474471e9df92bdc4c9edea1ace3"}
Dec 02 12:12:11 crc kubenswrapper[4721]: I1202 12:12:11.003746 4721 generic.go:334] "Generic (PLEG): container finished" podID="0fb41f2f-dfa4-47f6-bf3f-b80120df3bba" containerID="79ef1f85e484641138ea3f722a82c788cf177f4921a5dd9dd1248bf834190ce6" exitCode=0
Dec 02 12:12:11 crc kubenswrapper[4721]: I1202 12:12:11.003808 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2" event={"ID":"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba","Type":"ContainerDied","Data":"79ef1f85e484641138ea3f722a82c788cf177f4921a5dd9dd1248bf834190ce6"}
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.324163 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.359681 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"]
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.367889 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"]
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.466916 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-ring-data-devices\") pod \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") "
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.467009 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wn62\" (UniqueName: \"kubernetes.io/projected/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-kube-api-access-5wn62\") pod \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") "
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.467104 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-swiftconf\") pod \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") "
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.467121 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-dispersionconf\") pod \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") "
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.467156 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-etc-swift\") pod \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") "
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.467177 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-scripts\") pod \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\" (UID: \"0fb41f2f-dfa4-47f6-bf3f-b80120df3bba\") "
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.468574 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "0fb41f2f-dfa4-47f6-bf3f-b80120df3bba" (UID: "0fb41f2f-dfa4-47f6-bf3f-b80120df3bba"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.468648 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "0fb41f2f-dfa4-47f6-bf3f-b80120df3bba" (UID: "0fb41f2f-dfa4-47f6-bf3f-b80120df3bba"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.472788 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-kube-api-access-5wn62" (OuterVolumeSpecName: "kube-api-access-5wn62") pod "0fb41f2f-dfa4-47f6-bf3f-b80120df3bba" (UID: "0fb41f2f-dfa4-47f6-bf3f-b80120df3bba"). InnerVolumeSpecName "kube-api-access-5wn62". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.488530 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-scripts" (OuterVolumeSpecName: "scripts") pod "0fb41f2f-dfa4-47f6-bf3f-b80120df3bba" (UID: "0fb41f2f-dfa4-47f6-bf3f-b80120df3bba"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.490470 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "0fb41f2f-dfa4-47f6-bf3f-b80120df3bba" (UID: "0fb41f2f-dfa4-47f6-bf3f-b80120df3bba"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.491177 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "0fb41f2f-dfa4-47f6-bf3f-b80120df3bba" (UID: "0fb41f2f-dfa4-47f6-bf3f-b80120df3bba"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.552325 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"]
Dec 02 12:12:12 crc kubenswrapper[4721]: E1202 12:12:12.553096 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fb41f2f-dfa4-47f6-bf3f-b80120df3bba" containerName="swift-ring-rebalance"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.553113 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fb41f2f-dfa4-47f6-bf3f-b80120df3bba" containerName="swift-ring-rebalance"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.553330 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fb41f2f-dfa4-47f6-bf3f-b80120df3bba" containerName="swift-ring-rebalance"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.554061 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.560052 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"]
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.569036 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wn62\" (UniqueName: \"kubernetes.io/projected/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-kube-api-access-5wn62\") on node \"crc\" DevicePath \"\""
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.569263 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-swiftconf\") on node \"crc\" DevicePath \"\""
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.569277 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-dispersionconf\") on node \"crc\" DevicePath \"\""
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.569290 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.569301 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.569316 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba-ring-data-devices\") on node \"crc\" DevicePath \"\""
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.670625 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/89554ea0-5351-46f6-bd5e-f98de2afb3e7-ring-data-devices\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.670920 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/89554ea0-5351-46f6-bd5e-f98de2afb3e7-dispersionconf\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.671038 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/89554ea0-5351-46f6-bd5e-f98de2afb3e7-swiftconf\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.671197 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/89554ea0-5351-46f6-bd5e-f98de2afb3e7-scripts\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.671296 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/89554ea0-5351-46f6-bd5e-f98de2afb3e7-etc-swift\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.671431 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c52w6\" (UniqueName: \"kubernetes.io/projected/89554ea0-5351-46f6-bd5e-f98de2afb3e7-kube-api-access-c52w6\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.772633 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/89554ea0-5351-46f6-bd5e-f98de2afb3e7-dispersionconf\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.772701 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/89554ea0-5351-46f6-bd5e-f98de2afb3e7-swiftconf\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.772750 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/89554ea0-5351-46f6-bd5e-f98de2afb3e7-scripts\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.772770 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/89554ea0-5351-46f6-bd5e-f98de2afb3e7-etc-swift\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.772822 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c52w6\" (UniqueName: \"kubernetes.io/projected/89554ea0-5351-46f6-bd5e-f98de2afb3e7-kube-api-access-c52w6\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.772895 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/89554ea0-5351-46f6-bd5e-f98de2afb3e7-ring-data-devices\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.773622 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/89554ea0-5351-46f6-bd5e-f98de2afb3e7-etc-swift\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.773944 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/89554ea0-5351-46f6-bd5e-f98de2afb3e7-scripts\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.774218 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/89554ea0-5351-46f6-bd5e-f98de2afb3e7-ring-data-devices\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.776922 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/89554ea0-5351-46f6-bd5e-f98de2afb3e7-swiftconf\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.785921 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/89554ea0-5351-46f6-bd5e-f98de2afb3e7-dispersionconf\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.788274 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c52w6\" (UniqueName: \"kubernetes.io/projected/89554ea0-5351-46f6-bd5e-f98de2afb3e7-kube-api-access-c52w6\") pod \"swift-ring-rebalance-debug-fxz9m\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.848527 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fb41f2f-dfa4-47f6-bf3f-b80120df3bba" path="/var/lib/kubelet/pods/0fb41f2f-dfa4-47f6-bf3f-b80120df3bba/volumes"
Dec 02 12:12:12 crc kubenswrapper[4721]: I1202 12:12:12.892561 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:13 crc kubenswrapper[4721]: I1202 12:12:13.029468 4721 scope.go:117] "RemoveContainer" containerID="79ef1f85e484641138ea3f722a82c788cf177f4921a5dd9dd1248bf834190ce6"
Dec 02 12:12:13 crc kubenswrapper[4721]: I1202 12:12:13.029534 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-st7t2"
Dec 02 12:12:13 crc kubenswrapper[4721]: I1202 12:12:13.313623 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"]
Dec 02 12:12:13 crc kubenswrapper[4721]: W1202 12:12:13.317154 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89554ea0_5351_46f6_bd5e_f98de2afb3e7.slice/crio-813cbbf7bb95d605ac6ca0ea5b366f9d28621ca879d161a18f7f6407646700c1 WatchSource:0}: Error finding container 813cbbf7bb95d605ac6ca0ea5b366f9d28621ca879d161a18f7f6407646700c1: Status 404 returned error can't find the container with id 813cbbf7bb95d605ac6ca0ea5b366f9d28621ca879d161a18f7f6407646700c1
Dec 02 12:12:14 crc kubenswrapper[4721]: I1202 12:12:14.039511 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m" event={"ID":"89554ea0-5351-46f6-bd5e-f98de2afb3e7","Type":"ContainerStarted","Data":"08cb0f8471fb31fa4731981f56efe1a775af69f5547c9bd7cf193ad7e5eddcdb"}
Dec 02 12:12:14 crc kubenswrapper[4721]: I1202 12:12:14.039572 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m" event={"ID":"89554ea0-5351-46f6-bd5e-f98de2afb3e7","Type":"ContainerStarted","Data":"813cbbf7bb95d605ac6ca0ea5b366f9d28621ca879d161a18f7f6407646700c1"}
Dec 02 12:12:14 crc kubenswrapper[4721]: I1202 12:12:14.058908 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m" podStartSLOduration=2.058889218 podStartE2EDuration="2.058889218s" podCreationTimestamp="2025-12-02 12:12:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:12:14.057552346 +0000 UTC m=+1263.796102695" watchObservedRunningTime="2025-12-02 12:12:14.058889218 +0000 UTC m=+1263.797439567"
Dec 02 12:12:15 crc kubenswrapper[4721]: I1202 12:12:15.049073 4721 generic.go:334] "Generic (PLEG): container finished" podID="89554ea0-5351-46f6-bd5e-f98de2afb3e7" containerID="08cb0f8471fb31fa4731981f56efe1a775af69f5547c9bd7cf193ad7e5eddcdb" exitCode=0
Dec 02 12:12:15 crc kubenswrapper[4721]: I1202 12:12:15.049125 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m" event={"ID":"89554ea0-5351-46f6-bd5e-f98de2afb3e7","Type":"ContainerDied","Data":"08cb0f8471fb31fa4731981f56efe1a775af69f5547c9bd7cf193ad7e5eddcdb"}
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.432397 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.471513 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"]
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.478879 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m"]
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.540181 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/89554ea0-5351-46f6-bd5e-f98de2afb3e7-swiftconf\") pod \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") "
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.540284 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c52w6\" (UniqueName: \"kubernetes.io/projected/89554ea0-5351-46f6-bd5e-f98de2afb3e7-kube-api-access-c52w6\") pod \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") "
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.540364 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/89554ea0-5351-46f6-bd5e-f98de2afb3e7-ring-data-devices\") pod \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") "
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.540409 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/89554ea0-5351-46f6-bd5e-f98de2afb3e7-etc-swift\") pod \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") "
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.540423 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/89554ea0-5351-46f6-bd5e-f98de2afb3e7-scripts\") pod \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") "
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.540438 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/89554ea0-5351-46f6-bd5e-f98de2afb3e7-dispersionconf\") pod \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\" (UID: \"89554ea0-5351-46f6-bd5e-f98de2afb3e7\") "
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.542142 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89554ea0-5351-46f6-bd5e-f98de2afb3e7-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "89554ea0-5351-46f6-bd5e-f98de2afb3e7" (UID: "89554ea0-5351-46f6-bd5e-f98de2afb3e7"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.542402 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89554ea0-5351-46f6-bd5e-f98de2afb3e7-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "89554ea0-5351-46f6-bd5e-f98de2afb3e7" (UID: "89554ea0-5351-46f6-bd5e-f98de2afb3e7"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.547249 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89554ea0-5351-46f6-bd5e-f98de2afb3e7-kube-api-access-c52w6" (OuterVolumeSpecName: "kube-api-access-c52w6") pod "89554ea0-5351-46f6-bd5e-f98de2afb3e7" (UID: "89554ea0-5351-46f6-bd5e-f98de2afb3e7"). InnerVolumeSpecName "kube-api-access-c52w6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.570132 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89554ea0-5351-46f6-bd5e-f98de2afb3e7-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "89554ea0-5351-46f6-bd5e-f98de2afb3e7" (UID: "89554ea0-5351-46f6-bd5e-f98de2afb3e7"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.575313 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89554ea0-5351-46f6-bd5e-f98de2afb3e7-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "89554ea0-5351-46f6-bd5e-f98de2afb3e7" (UID: "89554ea0-5351-46f6-bd5e-f98de2afb3e7"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.579942 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89554ea0-5351-46f6-bd5e-f98de2afb3e7-scripts" (OuterVolumeSpecName: "scripts") pod "89554ea0-5351-46f6-bd5e-f98de2afb3e7" (UID: "89554ea0-5351-46f6-bd5e-f98de2afb3e7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.641784 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c52w6\" (UniqueName: \"kubernetes.io/projected/89554ea0-5351-46f6-bd5e-f98de2afb3e7-kube-api-access-c52w6\") on node \"crc\" DevicePath \"\""
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.641809 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/89554ea0-5351-46f6-bd5e-f98de2afb3e7-ring-data-devices\") on node \"crc\" DevicePath \"\""
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.641818 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/89554ea0-5351-46f6-bd5e-f98de2afb3e7-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.641827 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/89554ea0-5351-46f6-bd5e-f98de2afb3e7-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.641836 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/89554ea0-5351-46f6-bd5e-f98de2afb3e7-dispersionconf\") on node \"crc\" DevicePath \"\""
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.641845 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/89554ea0-5351-46f6-bd5e-f98de2afb3e7-swiftconf\") on node \"crc\" DevicePath \"\""
Dec 02 12:12:16 crc kubenswrapper[4721]: I1202 12:12:16.847630 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir"
podUID="89554ea0-5351-46f6-bd5e-f98de2afb3e7" path="/var/lib/kubelet/pods/89554ea0-5351-46f6-bd5e-f98de2afb3e7/volumes" Dec 02 12:12:17 crc kubenswrapper[4721]: I1202 12:12:17.067121 4721 scope.go:117] "RemoveContainer" containerID="08cb0f8471fb31fa4731981f56efe1a775af69f5547c9bd7cf193ad7e5eddcdb" Dec 02 12:12:17 crc kubenswrapper[4721]: I1202 12:12:17.067201 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-fxz9m" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.210922 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz"] Dec 02 12:12:19 crc kubenswrapper[4721]: E1202 12:12:19.211559 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89554ea0-5351-46f6-bd5e-f98de2afb3e7" containerName="swift-ring-rebalance" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.211575 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="89554ea0-5351-46f6-bd5e-f98de2afb3e7" containerName="swift-ring-rebalance" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.211762 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="89554ea0-5351-46f6-bd5e-f98de2afb3e7" containerName="swift-ring-rebalance" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.212295 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.214949 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.221049 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.222286 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz"] Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.277449 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a32f027b-e2b7-439a-b2c4-8b8df869d99c-dispersionconf\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.277740 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a32f027b-e2b7-439a-b2c4-8b8df869d99c-swiftconf\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.277839 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a32f027b-e2b7-439a-b2c4-8b8df869d99c-etc-swift\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.277964 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a32f027b-e2b7-439a-b2c4-8b8df869d99c-ring-data-devices\") pod 
\"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.278085 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a32f027b-e2b7-439a-b2c4-8b8df869d99c-scripts\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.278233 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp79k\" (UniqueName: \"kubernetes.io/projected/a32f027b-e2b7-439a-b2c4-8b8df869d99c-kube-api-access-sp79k\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.379738 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a32f027b-e2b7-439a-b2c4-8b8df869d99c-swiftconf\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.379791 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a32f027b-e2b7-439a-b2c4-8b8df869d99c-etc-swift\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.379845 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a32f027b-e2b7-439a-b2c4-8b8df869d99c-ring-data-devices\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.379869 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a32f027b-e2b7-439a-b2c4-8b8df869d99c-scripts\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.379920 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp79k\" (UniqueName: \"kubernetes.io/projected/a32f027b-e2b7-439a-b2c4-8b8df869d99c-kube-api-access-sp79k\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.379966 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a32f027b-e2b7-439a-b2c4-8b8df869d99c-dispersionconf\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.380679 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" 
(UniqueName: \"kubernetes.io/empty-dir/a32f027b-e2b7-439a-b2c4-8b8df869d99c-etc-swift\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.381252 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a32f027b-e2b7-439a-b2c4-8b8df869d99c-scripts\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.381473 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a32f027b-e2b7-439a-b2c4-8b8df869d99c-ring-data-devices\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.386602 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a32f027b-e2b7-439a-b2c4-8b8df869d99c-swiftconf\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.391633 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a32f027b-e2b7-439a-b2c4-8b8df869d99c-dispersionconf\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.407496 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp79k\" (UniqueName: \"kubernetes.io/projected/a32f027b-e2b7-439a-b2c4-8b8df869d99c-kube-api-access-sp79k\") pod \"swift-ring-rebalance-debug-d6ztz\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:19 crc kubenswrapper[4721]: I1202 12:12:19.533305 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:20 crc kubenswrapper[4721]: I1202 12:12:20.615026 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz"] Dec 02 12:12:21 crc kubenswrapper[4721]: I1202 12:12:21.098555 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" event={"ID":"a32f027b-e2b7-439a-b2c4-8b8df869d99c","Type":"ContainerStarted","Data":"489335ac195dc047e4f2b61ca4ae7b3d1810370d114a16f7033f42068f35364d"} Dec 02 12:12:21 crc kubenswrapper[4721]: I1202 12:12:21.098595 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" event={"ID":"a32f027b-e2b7-439a-b2c4-8b8df869d99c","Type":"ContainerStarted","Data":"615782be0f02dba34e47606532e9449bae7872288e6dac76cafd5ec5a1f7d50e"} Dec 02 12:12:21 crc kubenswrapper[4721]: I1202 12:12:21.122005 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" podStartSLOduration=2.121980596 podStartE2EDuration="2.121980596s" podCreationTimestamp="2025-12-02 12:12:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:12:21.118981944 +0000 UTC m=+1270.857532303" watchObservedRunningTime="2025-12-02 12:12:21.121980596 +0000 UTC m=+1270.860530945" Dec 02 12:12:22 crc kubenswrapper[4721]: I1202 12:12:22.107218 4721 generic.go:334] "Generic (PLEG): container finished" podID="a32f027b-e2b7-439a-b2c4-8b8df869d99c" containerID="489335ac195dc047e4f2b61ca4ae7b3d1810370d114a16f7033f42068f35364d" exitCode=0 Dec 02 12:12:22 crc kubenswrapper[4721]: I1202 12:12:22.107330 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" event={"ID":"a32f027b-e2b7-439a-b2c4-8b8df869d99c","Type":"ContainerDied","Data":"489335ac195dc047e4f2b61ca4ae7b3d1810370d114a16f7033f42068f35364d"} Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.443837 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.481780 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz"] Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.488675 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz"] Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.543693 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a32f027b-e2b7-439a-b2c4-8b8df869d99c-dispersionconf\") pod \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.543750 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a32f027b-e2b7-439a-b2c4-8b8df869d99c-scripts\") pod \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.543813 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sp79k\" (UniqueName: \"kubernetes.io/projected/a32f027b-e2b7-439a-b2c4-8b8df869d99c-kube-api-access-sp79k\") pod \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.543895 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a32f027b-e2b7-439a-b2c4-8b8df869d99c-swiftconf\") pod \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.543990 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a32f027b-e2b7-439a-b2c4-8b8df869d99c-ring-data-devices\") pod \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.544046 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a32f027b-e2b7-439a-b2c4-8b8df869d99c-etc-swift\") pod \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.545276 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a32f027b-e2b7-439a-b2c4-8b8df869d99c-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "a32f027b-e2b7-439a-b2c4-8b8df869d99c" (UID: "a32f027b-e2b7-439a-b2c4-8b8df869d99c"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.545787 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a32f027b-e2b7-439a-b2c4-8b8df869d99c-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "a32f027b-e2b7-439a-b2c4-8b8df869d99c" (UID: "a32f027b-e2b7-439a-b2c4-8b8df869d99c"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.549099 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a32f027b-e2b7-439a-b2c4-8b8df869d99c-kube-api-access-sp79k" (OuterVolumeSpecName: "kube-api-access-sp79k") pod "a32f027b-e2b7-439a-b2c4-8b8df869d99c" (UID: "a32f027b-e2b7-439a-b2c4-8b8df869d99c"). InnerVolumeSpecName "kube-api-access-sp79k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.586089 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a32f027b-e2b7-439a-b2c4-8b8df869d99c-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "a32f027b-e2b7-439a-b2c4-8b8df869d99c" (UID: "a32f027b-e2b7-439a-b2c4-8b8df869d99c"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.619431 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-dqs75"] Dec 02 12:12:23 crc kubenswrapper[4721]: E1202 12:12:23.628954 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a32f027b-e2b7-439a-b2c4-8b8df869d99c-dispersionconf podName:a32f027b-e2b7-439a-b2c4-8b8df869d99c nodeName:}" failed. No retries permitted until 2025-12-02 12:12:24.128931362 +0000 UTC m=+1273.867481711 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "dispersionconf" (UniqueName: "kubernetes.io/secret/a32f027b-e2b7-439a-b2c4-8b8df869d99c-dispersionconf") pod "a32f027b-e2b7-439a-b2c4-8b8df869d99c" (UID: "a32f027b-e2b7-439a-b2c4-8b8df869d99c") : error deleting /var/lib/kubelet/pods/a32f027b-e2b7-439a-b2c4-8b8df869d99c/volume-subpaths: remove /var/lib/kubelet/pods/a32f027b-e2b7-439a-b2c4-8b8df869d99c/volume-subpaths: no such file or directory Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.629418 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a32f027b-e2b7-439a-b2c4-8b8df869d99c-scripts" (OuterVolumeSpecName: "scripts") pod "a32f027b-e2b7-439a-b2c4-8b8df869d99c" (UID: "a32f027b-e2b7-439a-b2c4-8b8df869d99c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.631224 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-dqs75"] Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.638635 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.639197 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-server" containerID="cri-o://5bb187cdc2fa33ef6bcbc8229c85e0e3024c7a60613a8708957d4b86b8be6e95" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.639230 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-replicator" containerID="cri-o://3a6693d4d449985ba1e91f61dc8c74bacb7d85efe40dc0fcdf44947f33ef8eb0" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.639271 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-replicator" containerID="cri-o://ed051aea4154d9418fb64a82e565ae3d3d7984f5b5598da57baf244fe16f0e2d" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.639333 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-server" containerID="cri-o://b0e5efdedbd1f5fabe8998107f2587abe4734f6390787cc1e4e744765e6b4416" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.639361 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-server" containerID="cri-o://ef989123761d323f09877e23df927ef8328626d67fc841d36df3b4c2fe983f1e" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.639422 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-reaper" containerID="cri-o://1c735c7457a2fd9bcc93e8d51ec6e1196f1e87213b7ceb3888d75ea35c500809" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.639432 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-updater" containerID="cri-o://127d17d32a0876ca28fb0aa7c7bf329e19d6ba3dd60035a33df7a754b979fbc4" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.639464 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-auditor" containerID="cri-o://33a93293a5e6d46fbaa37920212e7c74478e7df95940b9b1755ff8f9857cf75b" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.639467 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-auditor" 
containerID="cri-o://c1ac0005a2eec5bb71049886a8888df3b036491c3bb2c173a455fa754615bd56" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.639511 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="rsync" containerID="cri-o://628e2ea957b2e937b293ba93b734911e730d56d108809fe05e1688b2f9353e2b" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.639529 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-replicator" containerID="cri-o://450139c98f8a059d63d01f895ec458a14cf2dd6bd7a8413db68f33900cd39268" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.639547 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-sharder" containerID="cri-o://0dbb6e125a022d781ba4771a7e6e8da34453bcadf734863dadb35308bc8ed993" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.639578 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="swift-recon-cron" containerID="cri-o://029bae74661c008a7bd2495205c5b40e47b7ed126d66e720bd751eca278f8a64" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.639649 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-updater" containerID="cri-o://4186a237272b72b6155c9ef4e9a70b2c12aa6d1fb15164ae0894bce037795924" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.639731 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-expirer" containerID="cri-o://97e6d994028248d3b24bc8277f9289824a5910b07e7de150598a88504cd769b7" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.640642 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-auditor" containerID="cri-o://771040ed153d1ea3dbd1bc88ba9f26ddd441e46bbff0089ee6cac94e90bac57c" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.645435 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sp79k\" (UniqueName: \"kubernetes.io/projected/a32f027b-e2b7-439a-b2c4-8b8df869d99c-kube-api-access-sp79k\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.645453 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a32f027b-e2b7-439a-b2c4-8b8df869d99c-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.645462 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a32f027b-e2b7-439a-b2c4-8b8df869d99c-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.645469 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" 
(UniqueName: \"kubernetes.io/empty-dir/a32f027b-e2b7-439a-b2c4-8b8df869d99c-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.645478 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a32f027b-e2b7-439a-b2c4-8b8df869d99c-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.659793 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"] Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.661266 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm" podUID="e8b5dbed-98a7-408e-89fa-9de1000db0fd" containerName="proxy-httpd" containerID="cri-o://8286eb8f0da0edabb525ade0a77e7ba7c03541194af514ffa3d0c93f9414cc80" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: I1202 12:12:23.661420 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm" podUID="e8b5dbed-98a7-408e-89fa-9de1000db0fd" containerName="proxy-server" containerID="cri-o://bad63ff065a83b78f743a6160204f04992feeb7ad286aa4f36d47b09d53c915a" gracePeriod=30 Dec 02 12:12:23 crc kubenswrapper[4721]: E1202 12:12:23.792986 4721 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabd15e9e_08cc_451c_903a_09250321067c.slice/crio-conmon-33a93293a5e6d46fbaa37920212e7c74478e7df95940b9b1755ff8f9857cf75b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabd15e9e_08cc_451c_903a_09250321067c.slice/crio-4186a237272b72b6155c9ef4e9a70b2c12aa6d1fb15164ae0894bce037795924.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabd15e9e_08cc_451c_903a_09250321067c.slice/crio-conmon-127d17d32a0876ca28fb0aa7c7bf329e19d6ba3dd60035a33df7a754b979fbc4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabd15e9e_08cc_451c_903a_09250321067c.slice/crio-1c735c7457a2fd9bcc93e8d51ec6e1196f1e87213b7ceb3888d75ea35c500809.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabd15e9e_08cc_451c_903a_09250321067c.slice/crio-3a6693d4d449985ba1e91f61dc8c74bacb7d85efe40dc0fcdf44947f33ef8eb0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabd15e9e_08cc_451c_903a_09250321067c.slice/crio-127d17d32a0876ca28fb0aa7c7bf329e19d6ba3dd60035a33df7a754b979fbc4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabd15e9e_08cc_451c_903a_09250321067c.slice/crio-97e6d994028248d3b24bc8277f9289824a5910b07e7de150598a88504cd769b7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabd15e9e_08cc_451c_903a_09250321067c.slice/crio-conmon-4186a237272b72b6155c9ef4e9a70b2c12aa6d1fb15164ae0894bce037795924.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabd15e9e_08cc_451c_903a_09250321067c.slice/crio-conmon-3a6693d4d449985ba1e91f61dc8c74bacb7d85efe40dc0fcdf44947f33ef8eb0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabd15e9e_08cc_451c_903a_09250321067c.slice/crio-c1ac0005a2eec5bb71049886a8888df3b036491c3bb2c173a455fa754615bd56.scope\": RecentStats: unable to find data in memory cache]" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.125071 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="615782be0f02dba34e47606532e9449bae7872288e6dac76cafd5ec5a1f7d50e" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.125372 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-d6ztz" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134266 4721 generic.go:334] "Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="0dbb6e125a022d781ba4771a7e6e8da34453bcadf734863dadb35308bc8ed993" exitCode=0 Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134298 4721 generic.go:334] "Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="97e6d994028248d3b24bc8277f9289824a5910b07e7de150598a88504cd769b7" exitCode=0 Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134310 4721 generic.go:334] "Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="4186a237272b72b6155c9ef4e9a70b2c12aa6d1fb15164ae0894bce037795924" exitCode=0 Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134319 4721 generic.go:334] "Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="771040ed153d1ea3dbd1bc88ba9f26ddd441e46bbff0089ee6cac94e90bac57c" exitCode=0 Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134329 4721 generic.go:334] "Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="3a6693d4d449985ba1e91f61dc8c74bacb7d85efe40dc0fcdf44947f33ef8eb0" exitCode=0 Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134350 4721 generic.go:334] "Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="127d17d32a0876ca28fb0aa7c7bf329e19d6ba3dd60035a33df7a754b979fbc4" exitCode=0 Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134359 4721 generic.go:334] "Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="c1ac0005a2eec5bb71049886a8888df3b036491c3bb2c173a455fa754615bd56" exitCode=0 Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134367 4721 generic.go:334] "Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="ed051aea4154d9418fb64a82e565ae3d3d7984f5b5598da57baf244fe16f0e2d" exitCode=0 Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134374 4721 generic.go:334] "Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="1c735c7457a2fd9bcc93e8d51ec6e1196f1e87213b7ceb3888d75ea35c500809" exitCode=0 Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134383 4721 generic.go:334] "Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="33a93293a5e6d46fbaa37920212e7c74478e7df95940b9b1755ff8f9857cf75b" exitCode=0 Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134391 4721 generic.go:334] 
"Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="450139c98f8a059d63d01f895ec458a14cf2dd6bd7a8413db68f33900cd39268" exitCode=0 Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134432 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"0dbb6e125a022d781ba4771a7e6e8da34453bcadf734863dadb35308bc8ed993"} Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134464 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"97e6d994028248d3b24bc8277f9289824a5910b07e7de150598a88504cd769b7"} Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134478 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"4186a237272b72b6155c9ef4e9a70b2c12aa6d1fb15164ae0894bce037795924"} Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134490 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"771040ed153d1ea3dbd1bc88ba9f26ddd441e46bbff0089ee6cac94e90bac57c"} Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134502 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"3a6693d4d449985ba1e91f61dc8c74bacb7d85efe40dc0fcdf44947f33ef8eb0"} Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134514 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"127d17d32a0876ca28fb0aa7c7bf329e19d6ba3dd60035a33df7a754b979fbc4"} Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134525 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"c1ac0005a2eec5bb71049886a8888df3b036491c3bb2c173a455fa754615bd56"} Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134536 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"ed051aea4154d9418fb64a82e565ae3d3d7984f5b5598da57baf244fe16f0e2d"} Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134547 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"1c735c7457a2fd9bcc93e8d51ec6e1196f1e87213b7ceb3888d75ea35c500809"} Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134557 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"33a93293a5e6d46fbaa37920212e7c74478e7df95940b9b1755ff8f9857cf75b"} Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.134568 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"450139c98f8a059d63d01f895ec458a14cf2dd6bd7a8413db68f33900cd39268"} Dec 
02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.140042 4721 generic.go:334] "Generic (PLEG): container finished" podID="e8b5dbed-98a7-408e-89fa-9de1000db0fd" containerID="8286eb8f0da0edabb525ade0a77e7ba7c03541194af514ffa3d0c93f9414cc80" exitCode=0 Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.140069 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm" event={"ID":"e8b5dbed-98a7-408e-89fa-9de1000db0fd","Type":"ContainerDied","Data":"8286eb8f0da0edabb525ade0a77e7ba7c03541194af514ffa3d0c93f9414cc80"} Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.152213 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a32f027b-e2b7-439a-b2c4-8b8df869d99c-dispersionconf\") pod \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\" (UID: \"a32f027b-e2b7-439a-b2c4-8b8df869d99c\") " Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.156724 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a32f027b-e2b7-439a-b2c4-8b8df869d99c-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "a32f027b-e2b7-439a-b2c4-8b8df869d99c" (UID: "a32f027b-e2b7-439a-b2c4-8b8df869d99c"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.253777 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a32f027b-e2b7-439a-b2c4-8b8df869d99c-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.612759 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.770283 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8b5dbed-98a7-408e-89fa-9de1000db0fd-run-httpd\") pod \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.770327 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e8b5dbed-98a7-408e-89fa-9de1000db0fd-etc-swift\") pod \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.770382 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8b5dbed-98a7-408e-89fa-9de1000db0fd-log-httpd\") pod \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.770405 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25gj8\" (UniqueName: \"kubernetes.io/projected/e8b5dbed-98a7-408e-89fa-9de1000db0fd-kube-api-access-25gj8\") pod \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\" (UID: \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.770458 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8b5dbed-98a7-408e-89fa-9de1000db0fd-config-data\") pod \"e8b5dbed-98a7-408e-89fa-9de1000db0fd\" (UID: 
\"e8b5dbed-98a7-408e-89fa-9de1000db0fd\") " Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.770730 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8b5dbed-98a7-408e-89fa-9de1000db0fd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e8b5dbed-98a7-408e-89fa-9de1000db0fd" (UID: "e8b5dbed-98a7-408e-89fa-9de1000db0fd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.770838 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8b5dbed-98a7-408e-89fa-9de1000db0fd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e8b5dbed-98a7-408e-89fa-9de1000db0fd" (UID: "e8b5dbed-98a7-408e-89fa-9de1000db0fd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.775187 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8b5dbed-98a7-408e-89fa-9de1000db0fd-kube-api-access-25gj8" (OuterVolumeSpecName: "kube-api-access-25gj8") pod "e8b5dbed-98a7-408e-89fa-9de1000db0fd" (UID: "e8b5dbed-98a7-408e-89fa-9de1000db0fd"). InnerVolumeSpecName "kube-api-access-25gj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.789556 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8b5dbed-98a7-408e-89fa-9de1000db0fd-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "e8b5dbed-98a7-408e-89fa-9de1000db0fd" (UID: "e8b5dbed-98a7-408e-89fa-9de1000db0fd"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.811357 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8b5dbed-98a7-408e-89fa-9de1000db0fd-config-data" (OuterVolumeSpecName: "config-data") pod "e8b5dbed-98a7-408e-89fa-9de1000db0fd" (UID: "e8b5dbed-98a7-408e-89fa-9de1000db0fd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.848380 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a32f027b-e2b7-439a-b2c4-8b8df869d99c" path="/var/lib/kubelet/pods/a32f027b-e2b7-439a-b2c4-8b8df869d99c/volumes" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.848976 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc0b7803-607e-4bc1-8268-a919c487ae58" path="/var/lib/kubelet/pods/fc0b7803-607e-4bc1-8268-a919c487ae58/volumes" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.872029 4721 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8b5dbed-98a7-408e-89fa-9de1000db0fd-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.872054 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25gj8\" (UniqueName: \"kubernetes.io/projected/e8b5dbed-98a7-408e-89fa-9de1000db0fd-kube-api-access-25gj8\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.872065 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8b5dbed-98a7-408e-89fa-9de1000db0fd-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.872074 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e8b5dbed-98a7-408e-89fa-9de1000db0fd-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:24 crc kubenswrapper[4721]: I1202 12:12:24.872082 4721 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8b5dbed-98a7-408e-89fa-9de1000db0fd-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.148703 4721 generic.go:334] "Generic (PLEG): container finished" podID="e8b5dbed-98a7-408e-89fa-9de1000db0fd" containerID="bad63ff065a83b78f743a6160204f04992feeb7ad286aa4f36d47b09d53c915a" exitCode=0 Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.148771 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm" event={"ID":"e8b5dbed-98a7-408e-89fa-9de1000db0fd","Type":"ContainerDied","Data":"bad63ff065a83b78f743a6160204f04992feeb7ad286aa4f36d47b09d53c915a"} Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.148800 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm" event={"ID":"e8b5dbed-98a7-408e-89fa-9de1000db0fd","Type":"ContainerDied","Data":"331f9e4cb4f2d799cc69e9a91caaebdbcc0fb943d618ebe65895aa81661b8514"} Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.148822 4721 scope.go:117] "RemoveContainer" containerID="bad63ff065a83b78f743a6160204f04992feeb7ad286aa4f36d47b09d53c915a" Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.148955 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-7fff86655-lrpjm" Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.158609 4721 generic.go:334] "Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="628e2ea957b2e937b293ba93b734911e730d56d108809fe05e1688b2f9353e2b" exitCode=0 Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.158634 4721 generic.go:334] "Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="ef989123761d323f09877e23df927ef8328626d67fc841d36df3b4c2fe983f1e" exitCode=0 Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.158642 4721 generic.go:334] "Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="b0e5efdedbd1f5fabe8998107f2587abe4734f6390787cc1e4e744765e6b4416" exitCode=0 Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.158650 4721 generic.go:334] "Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="5bb187cdc2fa33ef6bcbc8229c85e0e3024c7a60613a8708957d4b86b8be6e95" exitCode=0 Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.158651 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"628e2ea957b2e937b293ba93b734911e730d56d108809fe05e1688b2f9353e2b"} Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.158692 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"ef989123761d323f09877e23df927ef8328626d67fc841d36df3b4c2fe983f1e"} Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.158702 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"b0e5efdedbd1f5fabe8998107f2587abe4734f6390787cc1e4e744765e6b4416"} Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.158712 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"5bb187cdc2fa33ef6bcbc8229c85e0e3024c7a60613a8708957d4b86b8be6e95"} Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.168396 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"] Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.175107 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-proxy-7fff86655-lrpjm"] Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.180897 4721 scope.go:117] "RemoveContainer" containerID="8286eb8f0da0edabb525ade0a77e7ba7c03541194af514ffa3d0c93f9414cc80" Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.197585 4721 scope.go:117] "RemoveContainer" containerID="bad63ff065a83b78f743a6160204f04992feeb7ad286aa4f36d47b09d53c915a" Dec 02 12:12:25 crc kubenswrapper[4721]: E1202 12:12:25.197942 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bad63ff065a83b78f743a6160204f04992feeb7ad286aa4f36d47b09d53c915a\": container with ID starting with bad63ff065a83b78f743a6160204f04992feeb7ad286aa4f36d47b09d53c915a not found: ID does not exist" containerID="bad63ff065a83b78f743a6160204f04992feeb7ad286aa4f36d47b09d53c915a" Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 
12:12:25.198024 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bad63ff065a83b78f743a6160204f04992feeb7ad286aa4f36d47b09d53c915a"} err="failed to get container status \"bad63ff065a83b78f743a6160204f04992feeb7ad286aa4f36d47b09d53c915a\": rpc error: code = NotFound desc = could not find container \"bad63ff065a83b78f743a6160204f04992feeb7ad286aa4f36d47b09d53c915a\": container with ID starting with bad63ff065a83b78f743a6160204f04992feeb7ad286aa4f36d47b09d53c915a not found: ID does not exist" Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.198120 4721 scope.go:117] "RemoveContainer" containerID="8286eb8f0da0edabb525ade0a77e7ba7c03541194af514ffa3d0c93f9414cc80" Dec 02 12:12:25 crc kubenswrapper[4721]: E1202 12:12:25.198728 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8286eb8f0da0edabb525ade0a77e7ba7c03541194af514ffa3d0c93f9414cc80\": container with ID starting with 8286eb8f0da0edabb525ade0a77e7ba7c03541194af514ffa3d0c93f9414cc80 not found: ID does not exist" containerID="8286eb8f0da0edabb525ade0a77e7ba7c03541194af514ffa3d0c93f9414cc80" Dec 02 12:12:25 crc kubenswrapper[4721]: I1202 12:12:25.198763 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8286eb8f0da0edabb525ade0a77e7ba7c03541194af514ffa3d0c93f9414cc80"} err="failed to get container status \"8286eb8f0da0edabb525ade0a77e7ba7c03541194af514ffa3d0c93f9414cc80\": rpc error: code = NotFound desc = could not find container \"8286eb8f0da0edabb525ade0a77e7ba7c03541194af514ffa3d0c93f9414cc80\": container with ID starting with 8286eb8f0da0edabb525ade0a77e7ba7c03541194af514ffa3d0c93f9414cc80 not found: ID does not exist" Dec 02 12:12:26 crc kubenswrapper[4721]: I1202 12:12:26.852472 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8b5dbed-98a7-408e-89fa-9de1000db0fd" path="/var/lib/kubelet/pods/e8b5dbed-98a7-408e-89fa-9de1000db0fd/volumes" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.040552 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.100334 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"abd15e9e-08cc-451c-903a-09250321067c\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.100463 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/abd15e9e-08cc-451c-903a-09250321067c-lock\") pod \"abd15e9e-08cc-451c-903a-09250321067c\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.100511 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/abd15e9e-08cc-451c-903a-09250321067c-cache\") pod \"abd15e9e-08cc-451c-903a-09250321067c\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.100549 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift\") pod \"abd15e9e-08cc-451c-903a-09250321067c\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.100587 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkcks\" (UniqueName: \"kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-kube-api-access-wkcks\") pod \"abd15e9e-08cc-451c-903a-09250321067c\" (UID: \"abd15e9e-08cc-451c-903a-09250321067c\") " Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.101044 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abd15e9e-08cc-451c-903a-09250321067c-lock" (OuterVolumeSpecName: "lock") pod "abd15e9e-08cc-451c-903a-09250321067c" (UID: "abd15e9e-08cc-451c-903a-09250321067c"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.101143 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abd15e9e-08cc-451c-903a-09250321067c-cache" (OuterVolumeSpecName: "cache") pod "abd15e9e-08cc-451c-903a-09250321067c" (UID: "abd15e9e-08cc-451c-903a-09250321067c"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.105928 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "abd15e9e-08cc-451c-903a-09250321067c" (UID: "abd15e9e-08cc-451c-903a-09250321067c"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.105946 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-kube-api-access-wkcks" (OuterVolumeSpecName: "kube-api-access-wkcks") pod "abd15e9e-08cc-451c-903a-09250321067c" (UID: "abd15e9e-08cc-451c-903a-09250321067c"). InnerVolumeSpecName "kube-api-access-wkcks". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.106110 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "swift") pod "abd15e9e-08cc-451c-903a-09250321067c" (UID: "abd15e9e-08cc-451c-903a-09250321067c"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.201781 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.201809 4721 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/abd15e9e-08cc-451c-903a-09250321067c-lock\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.201818 4721 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/abd15e9e-08cc-451c-903a-09250321067c-cache\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.201827 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.201835 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkcks\" (UniqueName: \"kubernetes.io/projected/abd15e9e-08cc-451c-903a-09250321067c-kube-api-access-wkcks\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.214645 4721 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.304987 4721 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.419312 4721 generic.go:334] "Generic (PLEG): container finished" podID="abd15e9e-08cc-451c-903a-09250321067c" containerID="029bae74661c008a7bd2495205c5b40e47b7ed126d66e720bd751eca278f8a64" exitCode=137 Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.419425 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.419396 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"029bae74661c008a7bd2495205c5b40e47b7ed126d66e720bd751eca278f8a64"} Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.419805 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"abd15e9e-08cc-451c-903a-09250321067c","Type":"ContainerDied","Data":"954f9eef21487dab0c53d6de6385170b145616ea67d1fdaff049ddafe769689f"} Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.419849 4721 scope.go:117] "RemoveContainer" containerID="0dbb6e125a022d781ba4771a7e6e8da34453bcadf734863dadb35308bc8ed993" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.440181 4721 scope.go:117] "RemoveContainer" containerID="029bae74661c008a7bd2495205c5b40e47b7ed126d66e720bd751eca278f8a64" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.475192 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.488281 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.509540 4721 scope.go:117] "RemoveContainer" containerID="628e2ea957b2e937b293ba93b734911e730d56d108809fe05e1688b2f9353e2b" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.542535 4721 scope.go:117] "RemoveContainer" containerID="97e6d994028248d3b24bc8277f9289824a5910b07e7de150598a88504cd769b7" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.560484 4721 scope.go:117] "RemoveContainer" containerID="4186a237272b72b6155c9ef4e9a70b2c12aa6d1fb15164ae0894bce037795924" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.575095 4721 scope.go:117] "RemoveContainer" containerID="771040ed153d1ea3dbd1bc88ba9f26ddd441e46bbff0089ee6cac94e90bac57c" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.590655 4721 scope.go:117] "RemoveContainer" containerID="3a6693d4d449985ba1e91f61dc8c74bacb7d85efe40dc0fcdf44947f33ef8eb0" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.611103 4721 scope.go:117] "RemoveContainer" containerID="ef989123761d323f09877e23df927ef8328626d67fc841d36df3b4c2fe983f1e" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.628366 4721 scope.go:117] "RemoveContainer" containerID="127d17d32a0876ca28fb0aa7c7bf329e19d6ba3dd60035a33df7a754b979fbc4" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.642820 4721 scope.go:117] "RemoveContainer" containerID="c1ac0005a2eec5bb71049886a8888df3b036491c3bb2c173a455fa754615bd56" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.656918 4721 scope.go:117] "RemoveContainer" containerID="ed051aea4154d9418fb64a82e565ae3d3d7984f5b5598da57baf244fe16f0e2d" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.668865 4721 scope.go:117] "RemoveContainer" containerID="b0e5efdedbd1f5fabe8998107f2587abe4734f6390787cc1e4e744765e6b4416" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.682187 4721 scope.go:117] "RemoveContainer" containerID="1c735c7457a2fd9bcc93e8d51ec6e1196f1e87213b7ceb3888d75ea35c500809" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.693411 4721 scope.go:117] "RemoveContainer" containerID="33a93293a5e6d46fbaa37920212e7c74478e7df95940b9b1755ff8f9857cf75b" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.721406 4721 scope.go:117] "RemoveContainer" containerID="5bb187cdc2fa33ef6bcbc8229c85e0e3024c7a60613a8708957d4b86b8be6e95"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.734979 4721 scope.go:117] "RemoveContainer" containerID="0dbb6e125a022d781ba4771a7e6e8da34453bcadf734863dadb35308bc8ed993"
Dec 02 12:12:54 crc kubenswrapper[4721]: E1202 12:12:54.735387 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dbb6e125a022d781ba4771a7e6e8da34453bcadf734863dadb35308bc8ed993\": container with ID starting with 0dbb6e125a022d781ba4771a7e6e8da34453bcadf734863dadb35308bc8ed993 not found: ID does not exist" containerID="0dbb6e125a022d781ba4771a7e6e8da34453bcadf734863dadb35308bc8ed993"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.735437 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dbb6e125a022d781ba4771a7e6e8da34453bcadf734863dadb35308bc8ed993"} err="failed to get container status \"0dbb6e125a022d781ba4771a7e6e8da34453bcadf734863dadb35308bc8ed993\": rpc error: code = NotFound desc = could not find container \"0dbb6e125a022d781ba4771a7e6e8da34453bcadf734863dadb35308bc8ed993\": container with ID starting with 0dbb6e125a022d781ba4771a7e6e8da34453bcadf734863dadb35308bc8ed993 not found: ID does not exist"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.735458 4721 scope.go:117] "RemoveContainer" containerID="029bae74661c008a7bd2495205c5b40e47b7ed126d66e720bd751eca278f8a64"
Dec 02 12:12:54 crc kubenswrapper[4721]: E1202 12:12:54.735761 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"029bae74661c008a7bd2495205c5b40e47b7ed126d66e720bd751eca278f8a64\": container with ID starting with 029bae74661c008a7bd2495205c5b40e47b7ed126d66e720bd751eca278f8a64 not found: ID does not exist" containerID="029bae74661c008a7bd2495205c5b40e47b7ed126d66e720bd751eca278f8a64"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.735781 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"029bae74661c008a7bd2495205c5b40e47b7ed126d66e720bd751eca278f8a64"} err="failed to get container status \"029bae74661c008a7bd2495205c5b40e47b7ed126d66e720bd751eca278f8a64\": rpc error: code = NotFound desc = could not find container \"029bae74661c008a7bd2495205c5b40e47b7ed126d66e720bd751eca278f8a64\": container with ID starting with 029bae74661c008a7bd2495205c5b40e47b7ed126d66e720bd751eca278f8a64 not found: ID does not exist"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.735793 4721 scope.go:117] "RemoveContainer" containerID="628e2ea957b2e937b293ba93b734911e730d56d108809fe05e1688b2f9353e2b"
Dec 02 12:12:54 crc kubenswrapper[4721]: E1202 12:12:54.736014 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"628e2ea957b2e937b293ba93b734911e730d56d108809fe05e1688b2f9353e2b\": container with ID starting with 628e2ea957b2e937b293ba93b734911e730d56d108809fe05e1688b2f9353e2b not found: ID does not exist" containerID="628e2ea957b2e937b293ba93b734911e730d56d108809fe05e1688b2f9353e2b"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.736035 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"628e2ea957b2e937b293ba93b734911e730d56d108809fe05e1688b2f9353e2b"} err="failed to get container status \"628e2ea957b2e937b293ba93b734911e730d56d108809fe05e1688b2f9353e2b\": rpc error: code = NotFound desc = could not find container \"628e2ea957b2e937b293ba93b734911e730d56d108809fe05e1688b2f9353e2b\": container with ID starting with 628e2ea957b2e937b293ba93b734911e730d56d108809fe05e1688b2f9353e2b not found: ID does not exist"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.736046 4721 scope.go:117] "RemoveContainer" containerID="97e6d994028248d3b24bc8277f9289824a5910b07e7de150598a88504cd769b7"
Dec 02 12:12:54 crc kubenswrapper[4721]: E1202 12:12:54.736197 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97e6d994028248d3b24bc8277f9289824a5910b07e7de150598a88504cd769b7\": container with ID starting with 97e6d994028248d3b24bc8277f9289824a5910b07e7de150598a88504cd769b7 not found: ID does not exist" containerID="97e6d994028248d3b24bc8277f9289824a5910b07e7de150598a88504cd769b7"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.736214 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97e6d994028248d3b24bc8277f9289824a5910b07e7de150598a88504cd769b7"} err="failed to get container status \"97e6d994028248d3b24bc8277f9289824a5910b07e7de150598a88504cd769b7\": rpc error: code = NotFound desc = could not find container \"97e6d994028248d3b24bc8277f9289824a5910b07e7de150598a88504cd769b7\": container with ID starting with 97e6d994028248d3b24bc8277f9289824a5910b07e7de150598a88504cd769b7 not found: ID does not exist"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.736229 4721 scope.go:117] "RemoveContainer" containerID="4186a237272b72b6155c9ef4e9a70b2c12aa6d1fb15164ae0894bce037795924"
Dec 02 12:12:54 crc kubenswrapper[4721]: E1202 12:12:54.736434 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4186a237272b72b6155c9ef4e9a70b2c12aa6d1fb15164ae0894bce037795924\": container with ID starting with 4186a237272b72b6155c9ef4e9a70b2c12aa6d1fb15164ae0894bce037795924 not found: ID does not exist" containerID="4186a237272b72b6155c9ef4e9a70b2c12aa6d1fb15164ae0894bce037795924"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.736454 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4186a237272b72b6155c9ef4e9a70b2c12aa6d1fb15164ae0894bce037795924"} err="failed to get container status \"4186a237272b72b6155c9ef4e9a70b2c12aa6d1fb15164ae0894bce037795924\": rpc error: code = NotFound desc = could not find container \"4186a237272b72b6155c9ef4e9a70b2c12aa6d1fb15164ae0894bce037795924\": container with ID starting with 4186a237272b72b6155c9ef4e9a70b2c12aa6d1fb15164ae0894bce037795924 not found: ID does not exist"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.736466 4721 scope.go:117] "RemoveContainer" containerID="771040ed153d1ea3dbd1bc88ba9f26ddd441e46bbff0089ee6cac94e90bac57c"
Dec 02 12:12:54 crc kubenswrapper[4721]: E1202 12:12:54.736630 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"771040ed153d1ea3dbd1bc88ba9f26ddd441e46bbff0089ee6cac94e90bac57c\": container with ID starting with 771040ed153d1ea3dbd1bc88ba9f26ddd441e46bbff0089ee6cac94e90bac57c not found: ID does not exist" containerID="771040ed153d1ea3dbd1bc88ba9f26ddd441e46bbff0089ee6cac94e90bac57c"
containerID="771040ed153d1ea3dbd1bc88ba9f26ddd441e46bbff0089ee6cac94e90bac57c" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.736647 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"771040ed153d1ea3dbd1bc88ba9f26ddd441e46bbff0089ee6cac94e90bac57c"} err="failed to get container status \"771040ed153d1ea3dbd1bc88ba9f26ddd441e46bbff0089ee6cac94e90bac57c\": rpc error: code = NotFound desc = could not find container \"771040ed153d1ea3dbd1bc88ba9f26ddd441e46bbff0089ee6cac94e90bac57c\": container with ID starting with 771040ed153d1ea3dbd1bc88ba9f26ddd441e46bbff0089ee6cac94e90bac57c not found: ID does not exist" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.736662 4721 scope.go:117] "RemoveContainer" containerID="3a6693d4d449985ba1e91f61dc8c74bacb7d85efe40dc0fcdf44947f33ef8eb0" Dec 02 12:12:54 crc kubenswrapper[4721]: E1202 12:12:54.736890 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a6693d4d449985ba1e91f61dc8c74bacb7d85efe40dc0fcdf44947f33ef8eb0\": container with ID starting with 3a6693d4d449985ba1e91f61dc8c74bacb7d85efe40dc0fcdf44947f33ef8eb0 not found: ID does not exist" containerID="3a6693d4d449985ba1e91f61dc8c74bacb7d85efe40dc0fcdf44947f33ef8eb0" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.736910 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a6693d4d449985ba1e91f61dc8c74bacb7d85efe40dc0fcdf44947f33ef8eb0"} err="failed to get container status \"3a6693d4d449985ba1e91f61dc8c74bacb7d85efe40dc0fcdf44947f33ef8eb0\": rpc error: code = NotFound desc = could not find container \"3a6693d4d449985ba1e91f61dc8c74bacb7d85efe40dc0fcdf44947f33ef8eb0\": container with ID starting with 3a6693d4d449985ba1e91f61dc8c74bacb7d85efe40dc0fcdf44947f33ef8eb0 not found: ID does not exist" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.736922 4721 scope.go:117] "RemoveContainer" containerID="ef989123761d323f09877e23df927ef8328626d67fc841d36df3b4c2fe983f1e" Dec 02 12:12:54 crc kubenswrapper[4721]: E1202 12:12:54.737109 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef989123761d323f09877e23df927ef8328626d67fc841d36df3b4c2fe983f1e\": container with ID starting with ef989123761d323f09877e23df927ef8328626d67fc841d36df3b4c2fe983f1e not found: ID does not exist" containerID="ef989123761d323f09877e23df927ef8328626d67fc841d36df3b4c2fe983f1e" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.737127 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef989123761d323f09877e23df927ef8328626d67fc841d36df3b4c2fe983f1e"} err="failed to get container status \"ef989123761d323f09877e23df927ef8328626d67fc841d36df3b4c2fe983f1e\": rpc error: code = NotFound desc = could not find container \"ef989123761d323f09877e23df927ef8328626d67fc841d36df3b4c2fe983f1e\": container with ID starting with ef989123761d323f09877e23df927ef8328626d67fc841d36df3b4c2fe983f1e not found: ID does not exist" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.737139 4721 scope.go:117] "RemoveContainer" containerID="127d17d32a0876ca28fb0aa7c7bf329e19d6ba3dd60035a33df7a754b979fbc4" Dec 02 12:12:54 crc kubenswrapper[4721]: E1202 12:12:54.737462 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"127d17d32a0876ca28fb0aa7c7bf329e19d6ba3dd60035a33df7a754b979fbc4\": container with ID starting with 127d17d32a0876ca28fb0aa7c7bf329e19d6ba3dd60035a33df7a754b979fbc4 not found: ID does not exist" containerID="127d17d32a0876ca28fb0aa7c7bf329e19d6ba3dd60035a33df7a754b979fbc4" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.737482 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"127d17d32a0876ca28fb0aa7c7bf329e19d6ba3dd60035a33df7a754b979fbc4"} err="failed to get container status \"127d17d32a0876ca28fb0aa7c7bf329e19d6ba3dd60035a33df7a754b979fbc4\": rpc error: code = NotFound desc = could not find container \"127d17d32a0876ca28fb0aa7c7bf329e19d6ba3dd60035a33df7a754b979fbc4\": container with ID starting with 127d17d32a0876ca28fb0aa7c7bf329e19d6ba3dd60035a33df7a754b979fbc4 not found: ID does not exist" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.737495 4721 scope.go:117] "RemoveContainer" containerID="c1ac0005a2eec5bb71049886a8888df3b036491c3bb2c173a455fa754615bd56" Dec 02 12:12:54 crc kubenswrapper[4721]: E1202 12:12:54.737674 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1ac0005a2eec5bb71049886a8888df3b036491c3bb2c173a455fa754615bd56\": container with ID starting with c1ac0005a2eec5bb71049886a8888df3b036491c3bb2c173a455fa754615bd56 not found: ID does not exist" containerID="c1ac0005a2eec5bb71049886a8888df3b036491c3bb2c173a455fa754615bd56" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.737693 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1ac0005a2eec5bb71049886a8888df3b036491c3bb2c173a455fa754615bd56"} err="failed to get container status \"c1ac0005a2eec5bb71049886a8888df3b036491c3bb2c173a455fa754615bd56\": rpc error: code = NotFound desc = could not find container \"c1ac0005a2eec5bb71049886a8888df3b036491c3bb2c173a455fa754615bd56\": container with ID starting with c1ac0005a2eec5bb71049886a8888df3b036491c3bb2c173a455fa754615bd56 not found: ID does not exist" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.737706 4721 scope.go:117] "RemoveContainer" containerID="ed051aea4154d9418fb64a82e565ae3d3d7984f5b5598da57baf244fe16f0e2d" Dec 02 12:12:54 crc kubenswrapper[4721]: E1202 12:12:54.737942 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed051aea4154d9418fb64a82e565ae3d3d7984f5b5598da57baf244fe16f0e2d\": container with ID starting with ed051aea4154d9418fb64a82e565ae3d3d7984f5b5598da57baf244fe16f0e2d not found: ID does not exist" containerID="ed051aea4154d9418fb64a82e565ae3d3d7984f5b5598da57baf244fe16f0e2d" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.737991 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed051aea4154d9418fb64a82e565ae3d3d7984f5b5598da57baf244fe16f0e2d"} err="failed to get container status \"ed051aea4154d9418fb64a82e565ae3d3d7984f5b5598da57baf244fe16f0e2d\": rpc error: code = NotFound desc = could not find container \"ed051aea4154d9418fb64a82e565ae3d3d7984f5b5598da57baf244fe16f0e2d\": container with ID starting with ed051aea4154d9418fb64a82e565ae3d3d7984f5b5598da57baf244fe16f0e2d not found: ID does not exist" Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.738004 4721 scope.go:117] "RemoveContainer" containerID="b0e5efdedbd1f5fabe8998107f2587abe4734f6390787cc1e4e744765e6b4416" Dec 02 12:12:54 crc 
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.738224 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0e5efdedbd1f5fabe8998107f2587abe4734f6390787cc1e4e744765e6b4416"} err="failed to get container status \"b0e5efdedbd1f5fabe8998107f2587abe4734f6390787cc1e4e744765e6b4416\": rpc error: code = NotFound desc = could not find container \"b0e5efdedbd1f5fabe8998107f2587abe4734f6390787cc1e4e744765e6b4416\": container with ID starting with b0e5efdedbd1f5fabe8998107f2587abe4734f6390787cc1e4e744765e6b4416 not found: ID does not exist"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.738241 4721 scope.go:117] "RemoveContainer" containerID="1c735c7457a2fd9bcc93e8d51ec6e1196f1e87213b7ceb3888d75ea35c500809"
Dec 02 12:12:54 crc kubenswrapper[4721]: E1202 12:12:54.738595 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c735c7457a2fd9bcc93e8d51ec6e1196f1e87213b7ceb3888d75ea35c500809\": container with ID starting with 1c735c7457a2fd9bcc93e8d51ec6e1196f1e87213b7ceb3888d75ea35c500809 not found: ID does not exist" containerID="1c735c7457a2fd9bcc93e8d51ec6e1196f1e87213b7ceb3888d75ea35c500809"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.738617 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c735c7457a2fd9bcc93e8d51ec6e1196f1e87213b7ceb3888d75ea35c500809"} err="failed to get container status \"1c735c7457a2fd9bcc93e8d51ec6e1196f1e87213b7ceb3888d75ea35c500809\": rpc error: code = NotFound desc = could not find container \"1c735c7457a2fd9bcc93e8d51ec6e1196f1e87213b7ceb3888d75ea35c500809\": container with ID starting with 1c735c7457a2fd9bcc93e8d51ec6e1196f1e87213b7ceb3888d75ea35c500809 not found: ID does not exist"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.738635 4721 scope.go:117] "RemoveContainer" containerID="33a93293a5e6d46fbaa37920212e7c74478e7df95940b9b1755ff8f9857cf75b"
Dec 02 12:12:54 crc kubenswrapper[4721]: E1202 12:12:54.738881 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33a93293a5e6d46fbaa37920212e7c74478e7df95940b9b1755ff8f9857cf75b\": container with ID starting with 33a93293a5e6d46fbaa37920212e7c74478e7df95940b9b1755ff8f9857cf75b not found: ID does not exist" containerID="33a93293a5e6d46fbaa37920212e7c74478e7df95940b9b1755ff8f9857cf75b"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.738905 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33a93293a5e6d46fbaa37920212e7c74478e7df95940b9b1755ff8f9857cf75b"} err="failed to get container status \"33a93293a5e6d46fbaa37920212e7c74478e7df95940b9b1755ff8f9857cf75b\": rpc error: code = NotFound desc = could not find container \"33a93293a5e6d46fbaa37920212e7c74478e7df95940b9b1755ff8f9857cf75b\": container with ID starting with 33a93293a5e6d46fbaa37920212e7c74478e7df95940b9b1755ff8f9857cf75b not found: ID does not exist"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.738922 4721 scope.go:117] "RemoveContainer" containerID="450139c98f8a059d63d01f895ec458a14cf2dd6bd7a8413db68f33900cd39268"
Dec 02 12:12:54 crc kubenswrapper[4721]: E1202 12:12:54.739234 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"450139c98f8a059d63d01f895ec458a14cf2dd6bd7a8413db68f33900cd39268\": container with ID starting with 450139c98f8a059d63d01f895ec458a14cf2dd6bd7a8413db68f33900cd39268 not found: ID does not exist" containerID="450139c98f8a059d63d01f895ec458a14cf2dd6bd7a8413db68f33900cd39268"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.739259 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"450139c98f8a059d63d01f895ec458a14cf2dd6bd7a8413db68f33900cd39268"} err="failed to get container status \"450139c98f8a059d63d01f895ec458a14cf2dd6bd7a8413db68f33900cd39268\": rpc error: code = NotFound desc = could not find container \"450139c98f8a059d63d01f895ec458a14cf2dd6bd7a8413db68f33900cd39268\": container with ID starting with 450139c98f8a059d63d01f895ec458a14cf2dd6bd7a8413db68f33900cd39268 not found: ID does not exist"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.739277 4721 scope.go:117] "RemoveContainer" containerID="5bb187cdc2fa33ef6bcbc8229c85e0e3024c7a60613a8708957d4b86b8be6e95"
Dec 02 12:12:54 crc kubenswrapper[4721]: E1202 12:12:54.739773 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bb187cdc2fa33ef6bcbc8229c85e0e3024c7a60613a8708957d4b86b8be6e95\": container with ID starting with 5bb187cdc2fa33ef6bcbc8229c85e0e3024c7a60613a8708957d4b86b8be6e95 not found: ID does not exist" containerID="5bb187cdc2fa33ef6bcbc8229c85e0e3024c7a60613a8708957d4b86b8be6e95"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.739802 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bb187cdc2fa33ef6bcbc8229c85e0e3024c7a60613a8708957d4b86b8be6e95"} err="failed to get container status \"5bb187cdc2fa33ef6bcbc8229c85e0e3024c7a60613a8708957d4b86b8be6e95\": rpc error: code = NotFound desc = could not find container \"5bb187cdc2fa33ef6bcbc8229c85e0e3024c7a60613a8708957d4b86b8be6e95\": container with ID starting with 5bb187cdc2fa33ef6bcbc8229c85e0e3024c7a60613a8708957d4b86b8be6e95 not found: ID does not exist"
Dec 02 12:12:54 crc kubenswrapper[4721]: I1202 12:12:54.848335 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abd15e9e-08cc-451c-903a-09250321067c" path="/var/lib/kubelet/pods/abd15e9e-08cc-451c-903a-09250321067c/volumes"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.402608 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"]
Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.405232 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-server"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.405469 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-server"
Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.405624 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-reaper"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.405755 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-reaper"
assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-reaper" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.405897 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="rsync" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.406014 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="rsync" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.406155 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-auditor" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.406285 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-auditor" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.406476 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-replicator" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.406613 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-replicator" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.406748 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="swift-recon-cron" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.406877 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="swift-recon-cron" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.407014 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-updater" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.407199 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-updater" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.407383 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-sharder" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.407524 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-sharder" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.407688 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a32f027b-e2b7-439a-b2c4-8b8df869d99c" containerName="swift-ring-rebalance" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.407821 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="a32f027b-e2b7-439a-b2c4-8b8df869d99c" containerName="swift-ring-rebalance" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.407966 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-auditor" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.408078 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-auditor" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.408186 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-replicator" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.408275 4721 
state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-replicator" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.408391 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-updater" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.408482 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-updater" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.408572 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-server" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.408672 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-server" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.408889 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-auditor" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.408995 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-auditor" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.409103 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-server" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.409190 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-server" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.409300 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-replicator" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.409425 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-replicator" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.409529 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8b5dbed-98a7-408e-89fa-9de1000db0fd" containerName="proxy-server" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.409616 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8b5dbed-98a7-408e-89fa-9de1000db0fd" containerName="proxy-server" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.409712 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-expirer" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.409796 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-expirer" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.409890 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8b5dbed-98a7-408e-89fa-9de1000db0fd" containerName="proxy-httpd" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.409985 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8b5dbed-98a7-408e-89fa-9de1000db0fd" containerName="proxy-httpd" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.410404 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-auditor" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.410628 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-expirer"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.410723 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-reaper"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.410821 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="a32f027b-e2b7-439a-b2c4-8b8df869d99c" containerName="swift-ring-rebalance"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.410917 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8b5dbed-98a7-408e-89fa-9de1000db0fd" containerName="proxy-server"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.411020 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-updater"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.411121 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-auditor"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.411261 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-updater"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.411382 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-replicator"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.411483 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="account-replicator"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.411591 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8b5dbed-98a7-408e-89fa-9de1000db0fd" containerName="proxy-httpd"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.411689 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-server"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.411788 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="swift-recon-cron"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.411890 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="object-server"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.411980 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="rsync"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.412073 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-sharder"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.412164 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-auditor"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.412259 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd15e9e-08cc-451c-903a-09250321067c" containerName="container-replicator"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.413466 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"]
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.413683 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.418372 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-proxy-config-data"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.418584 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-files"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.418807 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-swift-dockercfg-5kxhh"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.419518 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-conf"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.472381 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.479513 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.482577 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-storage-config-data"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.483507 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-1"]
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.497106 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-1"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.497871 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-2"]
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.505789 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.505920 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.517691 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-2"]
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.529160 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-1"]
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.543404 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/54394d6a-eb39-44ab-b5d3-a38a393991a9-cache\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.543449 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06581551-03b2-4c6a-add5-cfa6216a461c-run-httpd\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.543503 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.543531 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/54394d6a-eb39-44ab-b5d3-a38a393991a9-lock\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.543556 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6hwq\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-kube-api-access-t6hwq\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.543575 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06581551-03b2-4c6a-add5-cfa6216a461c-config-data\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.543602 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.543629 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.543652 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx85d\" (UniqueName: \"kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-kube-api-access-jx85d\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.543670 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06581551-03b2-4c6a-add5-cfa6216a461c-log-httpd\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.644885 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/54394d6a-eb39-44ab-b5d3-a38a393991a9-lock\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.644940 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.644958 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.644988 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5ngf\" (UniqueName: \"kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-kube-api-access-s5ngf\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645006 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645022 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6hwq\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-kube-api-access-t6hwq\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645044 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06581551-03b2-4c6a-add5-cfa6216a461c-config-data\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645067 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0"
\"etc-swift\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645092 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645113 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jx85d\" (UniqueName: \"kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-kube-api-access-jx85d\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645129 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b8f4669a-6b01-451f-91cc-2786303375db-cache\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645149 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06581551-03b2-4c6a-add5-cfa6216a461c-log-httpd\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645178 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/54394d6a-eb39-44ab-b5d3-a38a393991a9-cache\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645196 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06581551-03b2-4c6a-add5-cfa6216a461c-run-httpd\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645221 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b8f4669a-6b01-451f-91cc-2786303375db-lock\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645247 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645263 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znzb5\" (UniqueName: \"kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-kube-api-access-znzb5\") pod \"swift-storage-2\" (UID: 
\"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645284 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645303 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8bf92b26-9c78-4bde-8b52-56263859b9fb-lock\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645319 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8bf92b26-9c78-4bde-8b52-56263859b9fb-cache\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645506 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/54394d6a-eb39-44ab-b5d3-a38a393991a9-lock\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.645677 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.645695 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.645735 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift podName:54394d6a-eb39-44ab-b5d3-a38a393991a9 nodeName:}" failed. No retries permitted until 2025-12-02 12:12:57.145718136 +0000 UTC m=+1306.884268485 (durationBeforeRetry 500ms). 
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645768 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/54394d6a-eb39-44ab-b5d3-a38a393991a9-cache\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645970 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06581551-03b2-4c6a-add5-cfa6216a461c-log-httpd\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.645990 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06581551-03b2-4c6a-add5-cfa6216a461c-run-httpd\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.646061 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.646085 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb: configmap "swift-ring-files" not found
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.646176 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") device mount path \"/mnt/openstack/pv10\"" pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.646212 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift podName:06581551-03b2-4c6a-add5-cfa6216a461c nodeName:}" failed. No retries permitted until 2025-12-02 12:12:57.146204811 +0000 UTC m=+1306.884755160 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift") pod "swift-proxy-8b5dc4bcf-zz2cb" (UID: "06581551-03b2-4c6a-add5-cfa6216a461c") : configmap "swift-ring-files" not found
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.654493 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06581551-03b2-4c6a-add5-cfa6216a461c-config-data\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.663319 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.663487 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6hwq\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-kube-api-access-t6hwq\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.664552 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jx85d\" (UniqueName: \"kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-kube-api-access-jx85d\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.746429 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znzb5\" (UniqueName: \"kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-kube-api-access-znzb5\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.746757 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.746923 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8bf92b26-9c78-4bde-8b52-56263859b9fb-lock\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.747044 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8bf92b26-9c78-4bde-8b52-56263859b9fb-cache\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.747150 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1"
Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.747247 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.747358 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5ngf\" (UniqueName: \"kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-kube-api-access-s5ngf\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.747440 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8bf92b26-9c78-4bde-8b52-56263859b9fb-lock\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.747473 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") device mount path \"/mnt/openstack/pv11\"" pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.747485 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8bf92b26-9c78-4bde-8b52-56263859b9fb-cache\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.746925 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.747544 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-1: configmap "swift-ring-files" not found Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.747572 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") device mount path \"/mnt/openstack/pv03\"" pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.747590 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift podName:b8f4669a-6b01-451f-91cc-2786303375db nodeName:}" failed. No retries permitted until 2025-12-02 12:12:57.247574939 +0000 UTC m=+1306.986125288 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift") pod "swift-storage-1" (UID: "b8f4669a-6b01-451f-91cc-2786303375db") : configmap "swift-ring-files" not found Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.747760 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.747905 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b8f4669a-6b01-451f-91cc-2786303375db-cache\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.747977 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.747996 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-2: configmap "swift-ring-files" not found Dec 02 12:12:56 crc kubenswrapper[4721]: E1202 12:12:56.748018 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift podName:8bf92b26-9c78-4bde-8b52-56263859b9fb nodeName:}" failed. No retries permitted until 2025-12-02 12:12:57.248011173 +0000 UTC m=+1306.986561522 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift") pod "swift-storage-2" (UID: "8bf92b26-9c78-4bde-8b52-56263859b9fb") : configmap "swift-ring-files" not found Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.748282 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b8f4669a-6b01-451f-91cc-2786303375db-lock\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.748392 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b8f4669a-6b01-451f-91cc-2786303375db-cache\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.748912 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b8f4669a-6b01-451f-91cc-2786303375db-lock\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.763915 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.764631 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.766759 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5ngf\" (UniqueName: \"kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-kube-api-access-s5ngf\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:12:56 crc kubenswrapper[4721]: I1202 12:12:56.770359 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znzb5\" (UniqueName: \"kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-kube-api-access-znzb5\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:12:57 crc kubenswrapper[4721]: I1202 12:12:57.155012 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" Dec 02 12:12:57 crc kubenswrapper[4721]: I1202 12:12:57.155100 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:12:57 crc kubenswrapper[4721]: E1202 12:12:57.155212 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:12:57 crc kubenswrapper[4721]: E1202 12:12:57.155243 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb: configmap "swift-ring-files" not found Dec 02 12:12:57 crc kubenswrapper[4721]: E1202 12:12:57.155251 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:12:57 crc kubenswrapper[4721]: E1202 12:12:57.155270 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:12:57 crc kubenswrapper[4721]: E1202 12:12:57.155326 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift podName:06581551-03b2-4c6a-add5-cfa6216a461c nodeName:}" failed. No retries permitted until 2025-12-02 12:12:58.155300984 +0000 UTC m=+1307.893851403 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift") pod "swift-proxy-8b5dc4bcf-zz2cb" (UID: "06581551-03b2-4c6a-add5-cfa6216a461c") : configmap "swift-ring-files" not found Dec 02 12:12:57 crc kubenswrapper[4721]: E1202 12:12:57.155363 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift podName:54394d6a-eb39-44ab-b5d3-a38a393991a9 nodeName:}" failed. No retries permitted until 2025-12-02 12:12:58.155354025 +0000 UTC m=+1307.893904384 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift") pod "swift-storage-0" (UID: "54394d6a-eb39-44ab-b5d3-a38a393991a9") : configmap "swift-ring-files" not found Dec 02 12:12:57 crc kubenswrapper[4721]: I1202 12:12:57.256425 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:12:57 crc kubenswrapper[4721]: I1202 12:12:57.256503 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:12:57 crc kubenswrapper[4721]: E1202 12:12:57.256631 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:12:57 crc kubenswrapper[4721]: E1202 12:12:57.256653 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-1: configmap "swift-ring-files" not found Dec 02 12:12:57 crc kubenswrapper[4721]: E1202 12:12:57.256681 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:12:57 crc kubenswrapper[4721]: E1202 12:12:57.256696 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-2: configmap "swift-ring-files" not found Dec 02 12:12:57 crc kubenswrapper[4721]: E1202 12:12:57.256708 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift podName:b8f4669a-6b01-451f-91cc-2786303375db nodeName:}" failed. No retries permitted until 2025-12-02 12:12:58.256692403 +0000 UTC m=+1307.995242752 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift") pod "swift-storage-1" (UID: "b8f4669a-6b01-451f-91cc-2786303375db") : configmap "swift-ring-files" not found Dec 02 12:12:57 crc kubenswrapper[4721]: E1202 12:12:57.256746 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift podName:8bf92b26-9c78-4bde-8b52-56263859b9fb nodeName:}" failed. No retries permitted until 2025-12-02 12:12:58.256730794 +0000 UTC m=+1307.995281143 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift") pod "swift-storage-2" (UID: "8bf92b26-9c78-4bde-8b52-56263859b9fb") : configmap "swift-ring-files" not found Dec 02 12:12:58 crc kubenswrapper[4721]: I1202 12:12:58.169764 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" Dec 02 12:12:58 crc kubenswrapper[4721]: I1202 12:12:58.170092 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:12:58 crc kubenswrapper[4721]: E1202 12:12:58.169948 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:12:58 crc kubenswrapper[4721]: E1202 12:12:58.170155 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb: configmap "swift-ring-files" not found Dec 02 12:12:58 crc kubenswrapper[4721]: E1202 12:12:58.170215 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift podName:06581551-03b2-4c6a-add5-cfa6216a461c nodeName:}" failed. No retries permitted until 2025-12-02 12:13:00.170194697 +0000 UTC m=+1309.908745046 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift") pod "swift-proxy-8b5dc4bcf-zz2cb" (UID: "06581551-03b2-4c6a-add5-cfa6216a461c") : configmap "swift-ring-files" not found Dec 02 12:12:58 crc kubenswrapper[4721]: E1202 12:12:58.170294 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:12:58 crc kubenswrapper[4721]: E1202 12:12:58.170309 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:12:58 crc kubenswrapper[4721]: E1202 12:12:58.170374 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift podName:54394d6a-eb39-44ab-b5d3-a38a393991a9 nodeName:}" failed. No retries permitted until 2025-12-02 12:13:00.170360412 +0000 UTC m=+1309.908910761 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift") pod "swift-storage-0" (UID: "54394d6a-eb39-44ab-b5d3-a38a393991a9") : configmap "swift-ring-files" not found Dec 02 12:12:58 crc kubenswrapper[4721]: I1202 12:12:58.271377 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:12:58 crc kubenswrapper[4721]: I1202 12:12:58.271454 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:12:58 crc kubenswrapper[4721]: E1202 12:12:58.271595 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:12:58 crc kubenswrapper[4721]: E1202 12:12:58.271619 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-1: configmap "swift-ring-files" not found Dec 02 12:12:58 crc kubenswrapper[4721]: E1202 12:12:58.271630 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:12:58 crc kubenswrapper[4721]: E1202 12:12:58.271658 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-2: configmap "swift-ring-files" not found Dec 02 12:12:58 crc kubenswrapper[4721]: E1202 12:12:58.271671 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift podName:b8f4669a-6b01-451f-91cc-2786303375db nodeName:}" failed. No retries permitted until 2025-12-02 12:13:00.271654708 +0000 UTC m=+1310.010205057 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift") pod "swift-storage-1" (UID: "b8f4669a-6b01-451f-91cc-2786303375db") : configmap "swift-ring-files" not found Dec 02 12:12:58 crc kubenswrapper[4721]: E1202 12:12:58.271702 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift podName:8bf92b26-9c78-4bde-8b52-56263859b9fb nodeName:}" failed. No retries permitted until 2025-12-02 12:13:00.271688109 +0000 UTC m=+1310.010238448 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift") pod "swift-storage-2" (UID: "8bf92b26-9c78-4bde-8b52-56263859b9fb") : configmap "swift-ring-files" not found Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.197753 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.197883 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" Dec 02 12:13:00 crc kubenswrapper[4721]: E1202 12:13:00.197966 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:13:00 crc kubenswrapper[4721]: E1202 12:13:00.197998 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:13:00 crc kubenswrapper[4721]: E1202 12:13:00.198025 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:13:00 crc kubenswrapper[4721]: E1202 12:13:00.198038 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb: configmap "swift-ring-files" not found Dec 02 12:13:00 crc kubenswrapper[4721]: E1202 12:13:00.198059 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift podName:54394d6a-eb39-44ab-b5d3-a38a393991a9 nodeName:}" failed. No retries permitted until 2025-12-02 12:13:04.198039634 +0000 UTC m=+1313.936589983 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift") pod "swift-storage-0" (UID: "54394d6a-eb39-44ab-b5d3-a38a393991a9") : configmap "swift-ring-files" not found Dec 02 12:13:00 crc kubenswrapper[4721]: E1202 12:13:00.198081 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift podName:06581551-03b2-4c6a-add5-cfa6216a461c nodeName:}" failed. No retries permitted until 2025-12-02 12:13:04.198067805 +0000 UTC m=+1313.936618154 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift") pod "swift-proxy-8b5dc4bcf-zz2cb" (UID: "06581551-03b2-4c6a-add5-cfa6216a461c") : configmap "swift-ring-files" not found Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.312667 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:13:00 crc kubenswrapper[4721]: E1202 12:13:00.313315 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:13:00 crc kubenswrapper[4721]: E1202 12:13:00.313361 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-2: configmap "swift-ring-files" not found Dec 02 12:13:00 crc kubenswrapper[4721]: E1202 12:13:00.313441 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift podName:8bf92b26-9c78-4bde-8b52-56263859b9fb nodeName:}" failed. No retries permitted until 2025-12-02 12:13:04.313398113 +0000 UTC m=+1314.051948462 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift") pod "swift-storage-2" (UID: "8bf92b26-9c78-4bde-8b52-56263859b9fb") : configmap "swift-ring-files" not found Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.314478 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:13:00 crc kubenswrapper[4721]: E1202 12:13:00.314636 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:13:00 crc kubenswrapper[4721]: E1202 12:13:00.314654 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-1: configmap "swift-ring-files" not found Dec 02 12:13:00 crc kubenswrapper[4721]: E1202 12:13:00.314679 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift podName:b8f4669a-6b01-451f-91cc-2786303375db nodeName:}" failed. No retries permitted until 2025-12-02 12:13:04.314671352 +0000 UTC m=+1314.053221701 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift") pod "swift-storage-1" (UID: "b8f4669a-6b01-451f-91cc-2786303375db") : configmap "swift-ring-files" not found Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.354484 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-92kt6"] Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.355662 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.357831 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.358703 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.364023 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-92kt6"] Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.385424 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-92kt6"] Dec 02 12:13:00 crc kubenswrapper[4721]: E1202 12:13:00.386020 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[dispersionconf etc-swift kube-api-access-g4dbc ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[dispersionconf etc-swift kube-api-access-g4dbc ring-data-devices scripts swiftconf]: context canceled" pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" podUID="a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.401368 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-t8k78"] Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.402401 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.413537 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-t8k78"] Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.461281 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.468850 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.519778 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-scripts\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.519870 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-ring-data-devices\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.519935 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-swiftconf\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.519978 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4dbc\" (UniqueName: \"kubernetes.io/projected/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-kube-api-access-g4dbc\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.520044 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-etc-swift\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.520066 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-swiftconf\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.520109 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-etc-swift\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.520165 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-dispersionconf\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.520222 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: 
\"kubernetes.io/configmap/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-ring-data-devices\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.520291 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtgcg\" (UniqueName: \"kubernetes.io/projected/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-kube-api-access-xtgcg\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.520314 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-scripts\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.520333 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-dispersionconf\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.622216 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-etc-swift\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.622261 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-swiftconf\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.622286 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-etc-swift\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.622317 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-dispersionconf\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.622367 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-ring-data-devices\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.622401 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-xtgcg\" (UniqueName: \"kubernetes.io/projected/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-kube-api-access-xtgcg\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.622417 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-dispersionconf\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.622435 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-scripts\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.622477 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-scripts\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.622508 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-ring-data-devices\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.622534 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-swiftconf\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.622554 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4dbc\" (UniqueName: \"kubernetes.io/projected/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-kube-api-access-g4dbc\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.622715 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-etc-swift\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.622717 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-etc-swift\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.623322 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-scripts\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.623812 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-ring-data-devices\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.623837 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-ring-data-devices\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.623848 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-scripts\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.627759 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-swiftconf\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.632696 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-dispersionconf\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.632698 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-dispersionconf\") pod \"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.637054 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-swiftconf\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.640530 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4dbc\" (UniqueName: \"kubernetes.io/projected/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-kube-api-access-g4dbc\") pod \"swift-ring-rebalance-92kt6\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.642742 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtgcg\" (UniqueName: \"kubernetes.io/projected/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-kube-api-access-xtgcg\") pod 
\"swift-ring-rebalance-t8k78\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") " pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.719895 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.723239 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4dbc\" (UniqueName: \"kubernetes.io/projected/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-kube-api-access-g4dbc\") pod \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.723271 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-dispersionconf\") pod \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.723322 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-ring-data-devices\") pod \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.723484 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-etc-swift\") pod \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.723564 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-scripts\") pod \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.723602 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-swiftconf\") pod \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\" (UID: \"a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51\") " Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.723840 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51" (UID: "a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.724158 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-scripts" (OuterVolumeSpecName: "scripts") pod "a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51" (UID: "a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.724280 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51" (UID: "a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.726376 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51" (UID: "a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.726393 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51" (UID: "a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.726510 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-kube-api-access-g4dbc" (OuterVolumeSpecName: "kube-api-access-g4dbc") pod "a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51" (UID: "a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51"). InnerVolumeSpecName "kube-api-access-g4dbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.825741 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.825772 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.825784 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.825796 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4dbc\" (UniqueName: \"kubernetes.io/projected/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-kube-api-access-g4dbc\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.825810 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:00 crc kubenswrapper[4721]: I1202 12:13:00.825820 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:01 crc kubenswrapper[4721]: I1202 12:13:01.170819 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["swift-kuttl-tests/swift-ring-rebalance-t8k78"] Dec 02 12:13:01 crc kubenswrapper[4721]: I1202 12:13:01.468739 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" event={"ID":"83333d8f-d5df-4ce3-a0eb-107d7ed9a623","Type":"ContainerStarted","Data":"e3595ce08cc19a10f06d9763a02bfd4c692e4315e633abef85a1ba70e959fcda"} Dec 02 12:13:01 crc kubenswrapper[4721]: I1202 12:13:01.468793 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" event={"ID":"83333d8f-d5df-4ce3-a0eb-107d7ed9a623","Type":"ContainerStarted","Data":"42bf53b2e1cea32cff90a9f92c5342d32e2898f374e95dd04e1e54b98117a60e"} Dec 02 12:13:01 crc kubenswrapper[4721]: I1202 12:13:01.468760 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-92kt6" Dec 02 12:13:01 crc kubenswrapper[4721]: I1202 12:13:01.486512 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" podStartSLOduration=1.4864976140000001 podStartE2EDuration="1.486497614s" podCreationTimestamp="2025-12-02 12:13:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:13:01.482268213 +0000 UTC m=+1311.220818562" watchObservedRunningTime="2025-12-02 12:13:01.486497614 +0000 UTC m=+1311.225047963" Dec 02 12:13:01 crc kubenswrapper[4721]: I1202 12:13:01.526296 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-92kt6"] Dec 02 12:13:01 crc kubenswrapper[4721]: I1202 12:13:01.531692 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-92kt6"] Dec 02 12:13:02 crc kubenswrapper[4721]: I1202 12:13:02.852471 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51" path="/var/lib/kubelet/pods/a5b3fa43-1d1a-428c-a5ce-9d48f2e80c51/volumes" Dec 02 12:13:04 crc kubenswrapper[4721]: I1202 12:13:04.279059 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:13:04 crc kubenswrapper[4721]: E1202 12:13:04.279241 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:13:04 crc kubenswrapper[4721]: E1202 12:13:04.279851 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:13:04 crc kubenswrapper[4721]: I1202 12:13:04.279864 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" Dec 02 12:13:04 crc kubenswrapper[4721]: E1202 12:13:04.279923 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift podName:54394d6a-eb39-44ab-b5d3-a38a393991a9 nodeName:}" failed. 
No retries permitted until 2025-12-02 12:13:12.279899053 +0000 UTC m=+1322.018449412 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift") pod "swift-storage-0" (UID: "54394d6a-eb39-44ab-b5d3-a38a393991a9") : configmap "swift-ring-files" not found
Dec 02 12:13:04 crc kubenswrapper[4721]: E1202 12:13:04.280024 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Dec 02 12:13:04 crc kubenswrapper[4721]: E1202 12:13:04.280038 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb: configmap "swift-ring-files" not found
Dec 02 12:13:04 crc kubenswrapper[4721]: E1202 12:13:04.280085 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift podName:06581551-03b2-4c6a-add5-cfa6216a461c nodeName:}" failed. No retries permitted until 2025-12-02 12:13:12.280069109 +0000 UTC m=+1322.018619458 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift") pod "swift-proxy-8b5dc4bcf-zz2cb" (UID: "06581551-03b2-4c6a-add5-cfa6216a461c") : configmap "swift-ring-files" not found
Dec 02 12:13:04 crc kubenswrapper[4721]: I1202 12:13:04.380936 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1"
Dec 02 12:13:04 crc kubenswrapper[4721]: I1202 12:13:04.381050 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2"
Dec 02 12:13:04 crc kubenswrapper[4721]: E1202 12:13:04.381127 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Dec 02 12:13:04 crc kubenswrapper[4721]: E1202 12:13:04.381152 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-1: configmap "swift-ring-files" not found
Dec 02 12:13:04 crc kubenswrapper[4721]: E1202 12:13:04.381202 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift podName:b8f4669a-6b01-451f-91cc-2786303375db nodeName:}" failed. No retries permitted until 2025-12-02 12:13:12.38118358 +0000 UTC m=+1322.119734019 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift") pod "swift-storage-1" (UID: "b8f4669a-6b01-451f-91cc-2786303375db") : configmap "swift-ring-files" not found
Dec 02 12:13:04 crc kubenswrapper[4721]: E1202 12:13:04.381271 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found
Dec 02 12:13:04 crc kubenswrapper[4721]: E1202 12:13:04.381294 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-2: configmap "swift-ring-files" not found
Dec 02 12:13:04 crc kubenswrapper[4721]: E1202 12:13:04.381503 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift podName:8bf92b26-9c78-4bde-8b52-56263859b9fb nodeName:}" failed. No retries permitted until 2025-12-02 12:13:12.381472839 +0000 UTC m=+1322.120023208 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift") pod "swift-storage-2" (UID: "8bf92b26-9c78-4bde-8b52-56263859b9fb") : configmap "swift-ring-files" not found
Dec 02 12:13:11 crc kubenswrapper[4721]: I1202 12:13:11.546213 4721 generic.go:334] "Generic (PLEG): container finished" podID="83333d8f-d5df-4ce3-a0eb-107d7ed9a623" containerID="e3595ce08cc19a10f06d9763a02bfd4c692e4315e633abef85a1ba70e959fcda" exitCode=0
Dec 02 12:13:11 crc kubenswrapper[4721]: I1202 12:13:11.546295 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" event={"ID":"83333d8f-d5df-4ce3-a0eb-107d7ed9a623","Type":"ContainerDied","Data":"e3595ce08cc19a10f06d9763a02bfd4c692e4315e633abef85a1ba70e959fcda"}
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.310993 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.311073 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.317255 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift\") pod \"swift-storage-0\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.317319 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift\") pod \"swift-proxy-8b5dc4bcf-zz2cb\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.366230 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.414128 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1"
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.414209 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2"
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.415472 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.418133 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift\") pod \"swift-storage-2\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " pod="swift-kuttl-tests/swift-storage-2"
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.422757 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift\") pod \"swift-storage-1\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " pod="swift-kuttl-tests/swift-storage-1"
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.431163 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-1"
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.444920 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2"
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.865561 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"]
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.942002 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-t8k78"
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.970994 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Dec 02 12:13:12 crc kubenswrapper[4721]: W1202 12:13:12.973497 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8f4669a_6b01_451f_91cc_2786303375db.slice/crio-32f4271625b8b239a24b916d8594bf7119bd560f8fd7fc53af93e927e228f72e WatchSource:0}: Error finding container 32f4271625b8b239a24b916d8594bf7119bd560f8fd7fc53af93e927e228f72e: Status 404 returned error can't find the container with id 32f4271625b8b239a24b916d8594bf7119bd560f8fd7fc53af93e927e228f72e
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.977426 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-1"]
Dec 02 12:13:12 crc kubenswrapper[4721]: I1202 12:13:12.997875 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-2"]
Dec 02 12:13:13 crc kubenswrapper[4721]: W1202 12:13:13.018228 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bf92b26_9c78_4bde_8b52_56263859b9fb.slice/crio-a545696ddb58c034f24b57f6b2da97462f5ca97440bcc7274ab84a166bbdc8f6 WatchSource:0}: Error finding container a545696ddb58c034f24b57f6b2da97462f5ca97440bcc7274ab84a166bbdc8f6: Status 404 returned error can't find the container with id a545696ddb58c034f24b57f6b2da97462f5ca97440bcc7274ab84a166bbdc8f6
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.020650 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-ring-data-devices\") pod \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") "
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.020719 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtgcg\" (UniqueName: \"kubernetes.io/projected/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-kube-api-access-xtgcg\") pod \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") "
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.020767 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-scripts\") pod \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") "
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.020849 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-swiftconf\") pod \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") "
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.021258 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-dispersionconf\") pod \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") "
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.021311 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "83333d8f-d5df-4ce3-a0eb-107d7ed9a623" (UID: "83333d8f-d5df-4ce3-a0eb-107d7ed9a623"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.021323 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-etc-swift\") pod \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\" (UID: \"83333d8f-d5df-4ce3-a0eb-107d7ed9a623\") "
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.021937 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-ring-data-devices\") on node \"crc\" DevicePath \"\""
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.023783 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "83333d8f-d5df-4ce3-a0eb-107d7ed9a623" (UID: "83333d8f-d5df-4ce3-a0eb-107d7ed9a623"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.026684 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-kube-api-access-xtgcg" (OuterVolumeSpecName: "kube-api-access-xtgcg") pod "83333d8f-d5df-4ce3-a0eb-107d7ed9a623" (UID: "83333d8f-d5df-4ce3-a0eb-107d7ed9a623"). InnerVolumeSpecName "kube-api-access-xtgcg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.048043 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "83333d8f-d5df-4ce3-a0eb-107d7ed9a623" (UID: "83333d8f-d5df-4ce3-a0eb-107d7ed9a623"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.054322 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-scripts" (OuterVolumeSpecName: "scripts") pod "83333d8f-d5df-4ce3-a0eb-107d7ed9a623" (UID: "83333d8f-d5df-4ce3-a0eb-107d7ed9a623"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.060715 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "83333d8f-d5df-4ce3-a0eb-107d7ed9a623" (UID: "83333d8f-d5df-4ce3-a0eb-107d7ed9a623"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.122920 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-swiftconf\") on node \"crc\" DevicePath \"\""
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.122954 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-dispersionconf\") on node \"crc\" DevicePath \"\""
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.122968 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.122981 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtgcg\" (UniqueName: \"kubernetes.io/projected/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-kube-api-access-xtgcg\") on node \"crc\" DevicePath \"\""
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.122993 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/83333d8f-d5df-4ce3-a0eb-107d7ed9a623-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.612661 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-t8k78" event={"ID":"83333d8f-d5df-4ce3-a0eb-107d7ed9a623","Type":"ContainerDied","Data":"42bf53b2e1cea32cff90a9f92c5342d32e2898f374e95dd04e1e54b98117a60e"}
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.613011 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42bf53b2e1cea32cff90a9f92c5342d32e2898f374e95dd04e1e54b98117a60e"
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.612689 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-t8k78"
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.633512 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" event={"ID":"06581551-03b2-4c6a-add5-cfa6216a461c","Type":"ContainerStarted","Data":"dd36af16b3f8948d39118eac04affce692461b8fccf54766ba111f311f82cc07"}
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.633554 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" event={"ID":"06581551-03b2-4c6a-add5-cfa6216a461c","Type":"ContainerStarted","Data":"cdc013152076d46bfb55d74405852fea85bf4ac53ddd98ed5ce5742dbf9483b1"}
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.633564 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" event={"ID":"06581551-03b2-4c6a-add5-cfa6216a461c","Type":"ContainerStarted","Data":"e3786827f2ee63a508b3bd7a90ba0e7e00eb0046600115d57fe64e13cff991c3"}
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.635090 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.635117 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.648270 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"ff082e662a26aeeea6732d9ad265b9acb9da661a4252a9d3dd52302629db893e"}
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.648315 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"3c4eacf0568ae73dd6413ce1561622aa8998c530cf8df7cce6ebe9c2c46d80b0"}
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.648328 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"bdbbf47a97448d8e3f16f5aeb238ed4d4f54e04bba167e8432c8d3dffe4602ca"}
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.648341 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"32f4271625b8b239a24b916d8594bf7119bd560f8fd7fc53af93e927e228f72e"}
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.654695 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"f33070308e90b30d9cdd71f45cf5283d4f08d7eb1d267b4d5c3f4fdb3b88e7c9"}
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.654743 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"9a14a12fa0925fa7c29432aa1ea91ad4238ba9bbcd8d6c303a4a6e23d98d992c"}
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.654758 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"a545696ddb58c034f24b57f6b2da97462f5ca97440bcc7274ab84a166bbdc8f6"}
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.659665 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" podStartSLOduration=17.659643513 podStartE2EDuration="17.659643513s" podCreationTimestamp="2025-12-02 12:12:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:13:13.652971269 +0000 UTC m=+1323.391521628" watchObservedRunningTime="2025-12-02 12:13:13.659643513 +0000 UTC m=+1323.398193872"
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.670501 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"b17edaa3291f4fcc1070e11c3ba8b7ea2370f6138a9e013896a7afcccc514df5"}
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.670548 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"1da308cd6e4b57955810b3f16b8850390d22406c5c67f5271af2bfcb6e30b8d5"}
Dec 02 12:13:13 crc kubenswrapper[4721]: I1202 12:13:13.670581 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"2333852cfcd2e5bad8bf5c55e7b61f14bdc588989ba9ac8d53cc65ea05088dbe"}
Dec 02 12:13:14 crc kubenswrapper[4721]: I1202 12:13:14.730962 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"ddcdd1ec5d88f5c19c5c33956dd876023076ce8196b01b8b60b11011d978cc74"}
Dec 02 12:13:14 crc kubenswrapper[4721]: I1202 12:13:14.731257 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"6e9c97886d2281d2db8385c13740c5db13108dbad3318be268566bdf4ef38e60"}
Dec 02 12:13:14 crc kubenswrapper[4721]: I1202 12:13:14.731268 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"056786e0cc65aa5944f5f7bf4545d4c3cd9ae097250472dddf1c826cb1ac9231"}
Dec 02 12:13:14 crc kubenswrapper[4721]: I1202 12:13:14.731276 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"3012afa35630159ed0600ebceef3df1eaf4dfb0728ea48837272b66f5dce0c7b"}
Dec 02 12:13:14 crc kubenswrapper[4721]: I1202 12:13:14.748213 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"a868d42e6fa1c098fc9d1583f04dca38e0be60a59a7c0762d581b1ca1d12bbda"}
Dec 02 12:13:14 crc kubenswrapper[4721]: I1202 12:13:14.748255 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"3c6328f8519642aa83a494e07195a10428397045d3b8562babcc3bd7fa755d16"}
Dec 02 12:13:14 crc kubenswrapper[4721]: I1202 12:13:14.748266 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"afc24a08241150352ee3448002c09defeace534d3cdb24c6c11e113df2658865"}
Dec 02 12:13:14 crc kubenswrapper[4721]: I1202 12:13:14.748274 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"c65cb5a856a1c5496889424f188ddde01dad3f8c58c03eaf04981bec7fcfa362"}
Dec 02 12:13:14 crc kubenswrapper[4721]: I1202 12:13:14.751083 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"cb4782790aa9a69b2a20327fa4e51e5f4a5f9461eb69c419341fc62846ef8318"}
Dec 02 12:13:14 crc kubenswrapper[4721]: I1202 12:13:14.751113 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"355bdf529f7f19ac1a7928e73f70afc680d14b7084acc2ed4a8664677b5a38fc"}
Dec 02 12:13:14 crc kubenswrapper[4721]: I1202 12:13:14.751121 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"5ddea8519471235cd203737898d4a84a22947a858c4161d0cdbc538395b90d59"}
Dec 02 12:13:14 crc kubenswrapper[4721]: I1202 12:13:14.751131 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"18523f1db324b880b4580224bf2ad72c03020e5940c91f1f4e0d34781b24bbd0"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.772936 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"fa20ac24780157fa9b52a532ecfc6a01b8357c8e4777d512d203c65148a94f96"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.773237 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"66c39b5fbb7a7d9d64886db6217c70fa81f42865cdaa296dc478fcca96a41978"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.773247 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"edea0e2ed18478f285ecaae9e57b77f1fcfb4dfd9b94f3ff54c22bc24b3b5df6"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.773255 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"19bd7e7242eb220985a6143c5b395a3162c141462dd4414ff8c5f615f49002b1"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.773263 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"80210a92576ea67e922d78ea7ac8c50c584e2c56cd8467eb85b5b29838f31334"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.782486 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"ea3dc07b828742ed47d9ee04d67d8b69338eda5d1113da2c4460899c2c5acb37"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.782527 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"69ab3857ba197ddd18cda3cf08e05c79f4925f822cd01d76d13743798385b8fb"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.782543 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"97ca0d88fc5a289cee8c0ab0310c5c97915d5ac70a170da304db86445523758a"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.782557 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"5a42824206ed29faa1e2a17dae669ab6e5d3adea29dd3b5a786383a72fc3adf1"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.782568 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"38828a33ce455be19b0e43568db4ffd83b44c1da254ef1943cc620bb9f4f65d2"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.802967 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"a09e88795ce2afc38328685e05ceb0f9eb725d61c3b98331e5658b0322ce0e3a"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.803022 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"ca8d8914238f7696f9bae1ac633efc7bd5b63e29dac383a6626db4cf4f83dcb2"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.803033 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"479d0dec2aaa6fd4ccce84dbbf0d0e58053b85167261d3f59757a121591636bc"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.803043 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"39c5390ffcbe3c0be784db7ef05672025fcf3d952d626304e297e1590718a897"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.803055 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"37cc4ccb7d8d4a2ff89c5372f3428d8517c9cf30a414c165ecbb4c6f1b5fa94c"}
Dec 02 12:13:15 crc kubenswrapper[4721]: I1202 12:13:15.803066 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"8363b9e2ac15e6050113b7320c011910acd54ded41353912314d366cc137db51"}
Dec 02 12:13:16 crc kubenswrapper[4721]: I1202 12:13:16.815057 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"82622747e6978b5288a6f64a0f89c622a9c67f282e2129f1c52ace5096950be4"}
Dec 02 12:13:16 crc kubenswrapper[4721]: I1202 12:13:16.815461 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerStarted","Data":"1c7c9162b575caa76db5e414cb7b4015f067355fff84caa7f8ac197367c94e01"}
Dec 02 12:13:16 crc kubenswrapper[4721]: I1202 12:13:16.820177 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"9f93046a00ab18f63db21c1a5dba48ed7daff1a6b7044e4d1da47b18536102d3"}
Dec 02 12:13:16 crc kubenswrapper[4721]: I1202 12:13:16.820206 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"17ecf94ee8139633e4482645dfc512d3c308f9254511ea8aab340da5449735b3"}
Dec 02 12:13:16 crc kubenswrapper[4721]: I1202 12:13:16.820215 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"27cc7a59bb52c7bd6d9de63a62663eb93857c7142b7a434c5f3e8c7bb4b8aa05"}
Dec 02 12:13:16 crc kubenswrapper[4721]: I1202 12:13:16.820224 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerStarted","Data":"b13272c7bfd0fb6396c6e0277ad0fe07e2d432578e94c8ecd666fcb45ea827e9"}
Dec 02 12:13:16 crc kubenswrapper[4721]: I1202 12:13:16.826094 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"a325af09c6f45c8e41a6dbcb452cd9644b277e3ad497298b0240f6c9b7e522ef"}
Dec 02 12:13:16 crc kubenswrapper[4721]: I1202 12:13:16.826141 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"fa37ab877067b4bd0837ef51174cd21450a84a3f3c933b37297b14c21c679786"}
Dec 02 12:13:16 crc kubenswrapper[4721]: I1202 12:13:16.826162 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"d0135e82a83e7dd432d606cd974ccad81f99a7740082c819d27aad70f20df54c"}
Dec 02 12:13:16 crc kubenswrapper[4721]: I1202 12:13:16.826171 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerStarted","Data":"a42959157e2fd85ce825bcf1a4e95457615a57ce555725522b47bd8f26bf7ac9"}
Dec 02 12:13:16 crc kubenswrapper[4721]: I1202 12:13:16.849061 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-1" podStartSLOduration=21.849045617 podStartE2EDuration="21.849045617s" podCreationTimestamp="2025-12-02 12:12:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:13:16.845086745 +0000 UTC m=+1326.583637094" watchObservedRunningTime="2025-12-02 12:13:16.849045617 +0000 UTC m=+1326.587595966"
Dec 02 12:13:16 crc kubenswrapper[4721]: I1202 12:13:16.891751 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-0" podStartSLOduration=21.89173395 podStartE2EDuration="21.89173395s" podCreationTimestamp="2025-12-02 12:12:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:13:16.884044734 +0000 UTC m=+1326.622595123" watchObservedRunningTime="2025-12-02 12:13:16.89173395 +0000 UTC m=+1326.630284299"
Dec 02 12:13:22 crc kubenswrapper[4721]: I1202 12:13:22.368463 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:13:22 crc kubenswrapper[4721]: I1202 12:13:22.369076 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"
Dec 02 12:13:22 crc kubenswrapper[4721]: I1202 12:13:22.390465 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-2" podStartSLOduration=27.390424479 podStartE2EDuration="27.390424479s" podCreationTimestamp="2025-12-02 12:12:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:13:16.933087602 +0000 UTC m=+1326.671637961" watchObservedRunningTime="2025-12-02 12:13:22.390424479 +0000 UTC m=+1332.128974828"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.477252 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"]
Dec 02 12:13:23 crc kubenswrapper[4721]: E1202 12:13:23.477528 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83333d8f-d5df-4ce3-a0eb-107d7ed9a623" containerName="swift-ring-rebalance"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.477540 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="83333d8f-d5df-4ce3-a0eb-107d7ed9a623" containerName="swift-ring-rebalance"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.477711 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="83333d8f-d5df-4ce3-a0eb-107d7ed9a623" containerName="swift-ring-rebalance"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.478186 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.481039 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.481144 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.501497 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"]
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.540229 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/23674525-5f10-4c00-8189-94c9a182f091-etc-swift\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.540513 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/23674525-5f10-4c00-8189-94c9a182f091-dispersionconf\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.540594 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfvnx\" (UniqueName: \"kubernetes.io/projected/23674525-5f10-4c00-8189-94c9a182f091-kube-api-access-cfvnx\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.540689 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/23674525-5f10-4c00-8189-94c9a182f091-swiftconf\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.540716 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23674525-5f10-4c00-8189-94c9a182f091-scripts\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.540752 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/23674525-5f10-4c00-8189-94c9a182f091-ring-data-devices\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.641633 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/23674525-5f10-4c00-8189-94c9a182f091-dispersionconf\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.641692 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfvnx\" (UniqueName: \"kubernetes.io/projected/23674525-5f10-4c00-8189-94c9a182f091-kube-api-access-cfvnx\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.641737 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/23674525-5f10-4c00-8189-94c9a182f091-swiftconf\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.641761 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23674525-5f10-4c00-8189-94c9a182f091-scripts\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.641786 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/23674525-5f10-4c00-8189-94c9a182f091-ring-data-devices\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.641833 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/23674525-5f10-4c00-8189-94c9a182f091-etc-swift\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.642323 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/23674525-5f10-4c00-8189-94c9a182f091-etc-swift\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.642874 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23674525-5f10-4c00-8189-94c9a182f091-scripts\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.642975 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/23674525-5f10-4c00-8189-94c9a182f091-ring-data-devices\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.647949 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/23674525-5f10-4c00-8189-94c9a182f091-dispersionconf\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.648051 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/23674525-5f10-4c00-8189-94c9a182f091-swiftconf\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.656505 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfvnx\" (UniqueName: \"kubernetes.io/projected/23674525-5f10-4c00-8189-94c9a182f091-kube-api-access-cfvnx\") pod \"swift-ring-rebalance-debug-s59cv\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:23 crc kubenswrapper[4721]: I1202 12:13:23.799540 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:24 crc kubenswrapper[4721]: I1202 12:13:24.246911 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"]
Dec 02 12:13:24 crc kubenswrapper[4721]: W1202 12:13:24.251871 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod23674525_5f10_4c00_8189_94c9a182f091.slice/crio-9ef408ff04ace3c23fd1c5240872b9f063d31a9203eb14cb7521fe4ce083a777 WatchSource:0}: Error finding container 9ef408ff04ace3c23fd1c5240872b9f063d31a9203eb14cb7521fe4ce083a777: Status 404 returned error can't find the container with id 9ef408ff04ace3c23fd1c5240872b9f063d31a9203eb14cb7521fe4ce083a777
Dec 02 12:13:24 crc kubenswrapper[4721]: I1202 12:13:24.898317 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv" event={"ID":"23674525-5f10-4c00-8189-94c9a182f091","Type":"ContainerStarted","Data":"c5f99c7caa613835a4cf50d5da28a879b49339f5f129a52f3e7e15dc4644a9d4"}
Dec 02 12:13:24 crc kubenswrapper[4721]: I1202 12:13:24.898724 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv" event={"ID":"23674525-5f10-4c00-8189-94c9a182f091","Type":"ContainerStarted","Data":"9ef408ff04ace3c23fd1c5240872b9f063d31a9203eb14cb7521fe4ce083a777"}
Dec 02 12:13:24 crc kubenswrapper[4721]: I1202 12:13:24.920633 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv" podStartSLOduration=1.920616611 podStartE2EDuration="1.920616611s" podCreationTimestamp="2025-12-02 12:13:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:13:24.916145144 +0000 UTC m=+1334.654695503" watchObservedRunningTime="2025-12-02 12:13:24.920616611 +0000 UTC m=+1334.659166960"
Dec 02 12:13:26 crc kubenswrapper[4721]: I1202 12:13:26.917304 4721 generic.go:334] "Generic (PLEG): container finished" podID="23674525-5f10-4c00-8189-94c9a182f091" containerID="c5f99c7caa613835a4cf50d5da28a879b49339f5f129a52f3e7e15dc4644a9d4" exitCode=0
Dec 02 12:13:26 crc kubenswrapper[4721]: I1202 12:13:26.917397 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv" event={"ID":"23674525-5f10-4c00-8189-94c9a182f091","Type":"ContainerDied","Data":"c5f99c7caa613835a4cf50d5da28a879b49339f5f129a52f3e7e15dc4644a9d4"}
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.232015 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.283465 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"]
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.290464 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"]
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.317645 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/23674525-5f10-4c00-8189-94c9a182f091-dispersionconf\") pod \"23674525-5f10-4c00-8189-94c9a182f091\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") "
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.317704 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/23674525-5f10-4c00-8189-94c9a182f091-ring-data-devices\") pod \"23674525-5f10-4c00-8189-94c9a182f091\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") "
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.317764 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfvnx\" (UniqueName: \"kubernetes.io/projected/23674525-5f10-4c00-8189-94c9a182f091-kube-api-access-cfvnx\") pod \"23674525-5f10-4c00-8189-94c9a182f091\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") "
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.317832 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/23674525-5f10-4c00-8189-94c9a182f091-etc-swift\") pod \"23674525-5f10-4c00-8189-94c9a182f091\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") "
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.317947 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23674525-5f10-4c00-8189-94c9a182f091-scripts\") pod \"23674525-5f10-4c00-8189-94c9a182f091\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") "
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.318004 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/23674525-5f10-4c00-8189-94c9a182f091-swiftconf\") pod \"23674525-5f10-4c00-8189-94c9a182f091\" (UID: \"23674525-5f10-4c00-8189-94c9a182f091\") "
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.318833 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23674525-5f10-4c00-8189-94c9a182f091-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "23674525-5f10-4c00-8189-94c9a182f091" (UID: "23674525-5f10-4c00-8189-94c9a182f091"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.319021 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23674525-5f10-4c00-8189-94c9a182f091-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "23674525-5f10-4c00-8189-94c9a182f091" (UID: "23674525-5f10-4c00-8189-94c9a182f091"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.323706 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23674525-5f10-4c00-8189-94c9a182f091-kube-api-access-cfvnx" (OuterVolumeSpecName: "kube-api-access-cfvnx") pod "23674525-5f10-4c00-8189-94c9a182f091" (UID: "23674525-5f10-4c00-8189-94c9a182f091"). InnerVolumeSpecName "kube-api-access-cfvnx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.341986 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23674525-5f10-4c00-8189-94c9a182f091-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "23674525-5f10-4c00-8189-94c9a182f091" (UID: "23674525-5f10-4c00-8189-94c9a182f091"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.351216 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23674525-5f10-4c00-8189-94c9a182f091-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "23674525-5f10-4c00-8189-94c9a182f091" (UID: "23674525-5f10-4c00-8189-94c9a182f091"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.362440 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23674525-5f10-4c00-8189-94c9a182f091-scripts" (OuterVolumeSpecName: "scripts") pod "23674525-5f10-4c00-8189-94c9a182f091" (UID: "23674525-5f10-4c00-8189-94c9a182f091"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.420674 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/23674525-5f10-4c00-8189-94c9a182f091-ring-data-devices\") on node \"crc\" DevicePath \"\""
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.420724 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfvnx\" (UniqueName: \"kubernetes.io/projected/23674525-5f10-4c00-8189-94c9a182f091-kube-api-access-cfvnx\") on node \"crc\" DevicePath \"\""
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.420740 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/23674525-5f10-4c00-8189-94c9a182f091-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.420752 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/23674525-5f10-4c00-8189-94c9a182f091-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.420765 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/23674525-5f10-4c00-8189-94c9a182f091-swiftconf\") on node \"crc\" DevicePath \"\""
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.420776 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/23674525-5f10-4c00-8189-94c9a182f091-dispersionconf\") on node \"crc\" DevicePath \"\""
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.450669 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-895z9"]
Dec 02 12:13:28 crc kubenswrapper[4721]: E1202 12:13:28.451160 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23674525-5f10-4c00-8189-94c9a182f091" containerName="swift-ring-rebalance"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.451177 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="23674525-5f10-4c00-8189-94c9a182f091" containerName="swift-ring-rebalance"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.451421 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="23674525-5f10-4c00-8189-94c9a182f091" containerName="swift-ring-rebalance"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.452194 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.466652 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-895z9"]
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.522269 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/60f04f7a-c487-4640-84d3-b44dad129c38-swiftconf\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.522914 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvmgt\" (UniqueName: \"kubernetes.io/projected/60f04f7a-c487-4640-84d3-b44dad129c38-kube-api-access-xvmgt\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.522988 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/60f04f7a-c487-4640-84d3-b44dad129c38-dispersionconf\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.523043 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/60f04f7a-c487-4640-84d3-b44dad129c38-etc-swift\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.523156 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/60f04f7a-c487-4640-84d3-b44dad129c38-scripts\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.523190 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/60f04f7a-c487-4640-84d3-b44dad129c38-ring-data-devices\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.624641 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/60f04f7a-c487-4640-84d3-b44dad129c38-scripts\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.625319 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/60f04f7a-c487-4640-84d3-b44dad129c38-scripts\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.625435 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/60f04f7a-c487-4640-84d3-b44dad129c38-ring-data-devices\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.625464 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/60f04f7a-c487-4640-84d3-b44dad129c38-swiftconf\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.625882 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/60f04f7a-c487-4640-84d3-b44dad129c38-ring-data-devices\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.625939 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvmgt\" (UniqueName: \"kubernetes.io/projected/60f04f7a-c487-4640-84d3-b44dad129c38-kube-api-access-xvmgt\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.626261 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/60f04f7a-c487-4640-84d3-b44dad129c38-dispersionconf\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.626603 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/60f04f7a-c487-4640-84d3-b44dad129c38-etc-swift\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.627475 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/60f04f7a-c487-4640-84d3-b44dad129c38-etc-swift\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.629945 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/60f04f7a-c487-4640-84d3-b44dad129c38-swiftconf\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.632434 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/60f04f7a-c487-4640-84d3-b44dad129c38-dispersionconf\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.642384 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvmgt\" (UniqueName: \"kubernetes.io/projected/60f04f7a-c487-4640-84d3-b44dad129c38-kube-api-access-xvmgt\") pod \"swift-ring-rebalance-debug-895z9\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.769299 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.854179 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23674525-5f10-4c00-8189-94c9a182f091" path="/var/lib/kubelet/pods/23674525-5f10-4c00-8189-94c9a182f091/volumes"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.950560 4721 scope.go:117] "RemoveContainer" containerID="c5f99c7caa613835a4cf50d5da28a879b49339f5f129a52f3e7e15dc4644a9d4"
Dec 02 12:13:28 crc kubenswrapper[4721]: I1202 12:13:28.950632 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-s59cv"
Dec 02 12:13:29 crc kubenswrapper[4721]: I1202 12:13:29.270784 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-895z9"]
Dec 02 12:13:29 crc kubenswrapper[4721]: W1202 12:13:29.283213 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60f04f7a_c487_4640_84d3_b44dad129c38.slice/crio-667c56c557035deffb854dde9cfc85133da85e803aed25a66ff53bbcff069637 WatchSource:0}: Error finding container 667c56c557035deffb854dde9cfc85133da85e803aed25a66ff53bbcff069637: Status 404 returned error can't find the container with id 667c56c557035deffb854dde9cfc85133da85e803aed25a66ff53bbcff069637
Dec 02 12:13:29 crc kubenswrapper[4721]: I1202 12:13:29.969080 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9" event={"ID":"60f04f7a-c487-4640-84d3-b44dad129c38","Type":"ContainerStarted","Data":"8075ed09bcab06c0a231c1cabf3e84c96324fed88d1662cf29bb85487628bcf7"}
Dec 02 12:13:29 crc kubenswrapper[4721]: I1202 12:13:29.969442 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9" event={"ID":"60f04f7a-c487-4640-84d3-b44dad129c38","Type":"ContainerStarted","Data":"667c56c557035deffb854dde9cfc85133da85e803aed25a66ff53bbcff069637"}
Dec 02 12:13:29 crc kubenswrapper[4721]: I1202 12:13:29.997090 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9" podStartSLOduration=1.99707411 podStartE2EDuration="1.99707411s" podCreationTimestamp="2025-12-02 12:13:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:13:29.991189579 +0000 UTC m=+1339.729739938" watchObservedRunningTime="2025-12-02 12:13:29.99707411 +0000 UTC m=+1339.735624459"
Dec 02 12:13:31 crc kubenswrapper[4721]: I1202 12:13:31.991866 4721 generic.go:334] "Generic (PLEG): container finished" podID="60f04f7a-c487-4640-84d3-b44dad129c38" containerID="8075ed09bcab06c0a231c1cabf3e84c96324fed88d1662cf29bb85487628bcf7" exitCode=0
Dec 02 12:13:31 crc kubenswrapper[4721]: I1202 12:13:31.991916 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9" event={"ID":"60f04f7a-c487-4640-84d3-b44dad129c38","Type":"ContainerDied","Data":"8075ed09bcab06c0a231c1cabf3e84c96324fed88d1662cf29bb85487628bcf7"}
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.270600 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9"
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.307943 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-895z9"]
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.310172 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/60f04f7a-c487-4640-84d3-b44dad129c38-etc-swift\") pod \"60f04f7a-c487-4640-84d3-b44dad129c38\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") "
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.310273 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvmgt\" (UniqueName: \"kubernetes.io/projected/60f04f7a-c487-4640-84d3-b44dad129c38-kube-api-access-xvmgt\") pod \"60f04f7a-c487-4640-84d3-b44dad129c38\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") "
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.310312 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/60f04f7a-c487-4640-84d3-b44dad129c38-dispersionconf\") pod \"60f04f7a-c487-4640-84d3-b44dad129c38\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") "
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.310393 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/60f04f7a-c487-4640-84d3-b44dad129c38-swiftconf\") pod \"60f04f7a-c487-4640-84d3-b44dad129c38\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") "
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.310422 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/60f04f7a-c487-4640-84d3-b44dad129c38-scripts\") pod \"60f04f7a-c487-4640-84d3-b44dad129c38\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") "
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.310515 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/60f04f7a-c487-4640-84d3-b44dad129c38-ring-data-devices\") pod \"60f04f7a-c487-4640-84d3-b44dad129c38\" (UID: \"60f04f7a-c487-4640-84d3-b44dad129c38\") "
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.311066 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60f04f7a-c487-4640-84d3-b44dad129c38-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "60f04f7a-c487-4640-84d3-b44dad129c38" (UID: "60f04f7a-c487-4640-84d3-b44dad129c38"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.311308 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60f04f7a-c487-4640-84d3-b44dad129c38-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "60f04f7a-c487-4640-84d3-b44dad129c38" (UID: "60f04f7a-c487-4640-84d3-b44dad129c38"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.311767 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/60f04f7a-c487-4640-84d3-b44dad129c38-ring-data-devices\") on node \"crc\" DevicePath \"\""
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.311790 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/60f04f7a-c487-4640-84d3-b44dad129c38-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.320909 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60f04f7a-c487-4640-84d3-b44dad129c38-kube-api-access-xvmgt" (OuterVolumeSpecName: "kube-api-access-xvmgt") pod "60f04f7a-c487-4640-84d3-b44dad129c38" (UID: "60f04f7a-c487-4640-84d3-b44dad129c38"). InnerVolumeSpecName "kube-api-access-xvmgt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.328807 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-895z9"]
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.331554 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60f04f7a-c487-4640-84d3-b44dad129c38-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "60f04f7a-c487-4640-84d3-b44dad129c38" (UID: "60f04f7a-c487-4640-84d3-b44dad129c38"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.331607 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60f04f7a-c487-4640-84d3-b44dad129c38-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "60f04f7a-c487-4640-84d3-b44dad129c38" (UID: "60f04f7a-c487-4640-84d3-b44dad129c38"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.332125 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60f04f7a-c487-4640-84d3-b44dad129c38-scripts" (OuterVolumeSpecName: "scripts") pod "60f04f7a-c487-4640-84d3-b44dad129c38" (UID: "60f04f7a-c487-4640-84d3-b44dad129c38"). InnerVolumeSpecName "scripts".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.413905 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/60f04f7a-c487-4640-84d3-b44dad129c38-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.413956 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/60f04f7a-c487-4640-84d3-b44dad129c38-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.413975 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvmgt\" (UniqueName: \"kubernetes.io/projected/60f04f7a-c487-4640-84d3-b44dad129c38-kube-api-access-xvmgt\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.413997 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/60f04f7a-c487-4640-84d3-b44dad129c38-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.685177 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-kg84k"] Dec 02 12:13:33 crc kubenswrapper[4721]: E1202 12:13:33.685936 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60f04f7a-c487-4640-84d3-b44dad129c38" containerName="swift-ring-rebalance" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.685956 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="60f04f7a-c487-4640-84d3-b44dad129c38" containerName="swift-ring-rebalance" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.686100 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="60f04f7a-c487-4640-84d3-b44dad129c38" containerName="swift-ring-rebalance" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.686603 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.699863 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-kg84k"] Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.718242 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d8256aec-df1d-4261-8810-28b35906db97-scripts\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.718289 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d8256aec-df1d-4261-8810-28b35906db97-etc-swift\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.718398 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d8256aec-df1d-4261-8810-28b35906db97-dispersionconf\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.718430 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d8256aec-df1d-4261-8810-28b35906db97-swiftconf\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.718475 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d8256aec-df1d-4261-8810-28b35906db97-ring-data-devices\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.718498 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92rfl\" (UniqueName: \"kubernetes.io/projected/d8256aec-df1d-4261-8810-28b35906db97-kube-api-access-92rfl\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.820243 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d8256aec-df1d-4261-8810-28b35906db97-etc-swift\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.820309 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d8256aec-df1d-4261-8810-28b35906db97-scripts\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " 
pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.820420 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d8256aec-df1d-4261-8810-28b35906db97-dispersionconf\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.820469 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d8256aec-df1d-4261-8810-28b35906db97-swiftconf\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.820531 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d8256aec-df1d-4261-8810-28b35906db97-ring-data-devices\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.820566 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92rfl\" (UniqueName: \"kubernetes.io/projected/d8256aec-df1d-4261-8810-28b35906db97-kube-api-access-92rfl\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.820702 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d8256aec-df1d-4261-8810-28b35906db97-etc-swift\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.821034 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d8256aec-df1d-4261-8810-28b35906db97-scripts\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.821189 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d8256aec-df1d-4261-8810-28b35906db97-ring-data-devices\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.825272 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d8256aec-df1d-4261-8810-28b35906db97-dispersionconf\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.825710 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d8256aec-df1d-4261-8810-28b35906db97-swiftconf\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: 
\"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:33 crc kubenswrapper[4721]: I1202 12:13:33.835410 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92rfl\" (UniqueName: \"kubernetes.io/projected/d8256aec-df1d-4261-8810-28b35906db97-kube-api-access-92rfl\") pod \"swift-ring-rebalance-debug-kg84k\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:34 crc kubenswrapper[4721]: I1202 12:13:34.001216 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:34 crc kubenswrapper[4721]: I1202 12:13:34.008847 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="667c56c557035deffb854dde9cfc85133da85e803aed25a66ff53bbcff069637" Dec 02 12:13:34 crc kubenswrapper[4721]: I1202 12:13:34.008866 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-895z9" Dec 02 12:13:34 crc kubenswrapper[4721]: W1202 12:13:34.413179 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd8256aec_df1d_4261_8810_28b35906db97.slice/crio-11f00f2ed4e77f96991a26bdbca3d8efc1d7041ca1d49c0a0488f0ee052f2d0f WatchSource:0}: Error finding container 11f00f2ed4e77f96991a26bdbca3d8efc1d7041ca1d49c0a0488f0ee052f2d0f: Status 404 returned error can't find the container with id 11f00f2ed4e77f96991a26bdbca3d8efc1d7041ca1d49c0a0488f0ee052f2d0f Dec 02 12:13:34 crc kubenswrapper[4721]: I1202 12:13:34.417629 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-kg84k"] Dec 02 12:13:34 crc kubenswrapper[4721]: I1202 12:13:34.848993 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60f04f7a-c487-4640-84d3-b44dad129c38" path="/var/lib/kubelet/pods/60f04f7a-c487-4640-84d3-b44dad129c38/volumes" Dec 02 12:13:35 crc kubenswrapper[4721]: I1202 12:13:35.016896 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" event={"ID":"d8256aec-df1d-4261-8810-28b35906db97","Type":"ContainerStarted","Data":"80a25188c3a297462774a26f2f40dcb50a669d76b2758a866aa8379d3f1d83f2"} Dec 02 12:13:35 crc kubenswrapper[4721]: I1202 12:13:35.016946 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" event={"ID":"d8256aec-df1d-4261-8810-28b35906db97","Type":"ContainerStarted","Data":"11f00f2ed4e77f96991a26bdbca3d8efc1d7041ca1d49c0a0488f0ee052f2d0f"} Dec 02 12:13:35 crc kubenswrapper[4721]: I1202 12:13:35.054560 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" podStartSLOduration=2.054539054 podStartE2EDuration="2.054539054s" podCreationTimestamp="2025-12-02 12:13:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:13:35.036721276 +0000 UTC m=+1344.775271625" watchObservedRunningTime="2025-12-02 12:13:35.054539054 +0000 UTC m=+1344.793089403" Dec 02 12:13:36 crc kubenswrapper[4721]: I1202 12:13:36.025674 4721 generic.go:334] "Generic (PLEG): container finished" podID="d8256aec-df1d-4261-8810-28b35906db97" 
containerID="80a25188c3a297462774a26f2f40dcb50a669d76b2758a866aa8379d3f1d83f2" exitCode=0 Dec 02 12:13:36 crc kubenswrapper[4721]: I1202 12:13:36.025727 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" event={"ID":"d8256aec-df1d-4261-8810-28b35906db97","Type":"ContainerDied","Data":"80a25188c3a297462774a26f2f40dcb50a669d76b2758a866aa8379d3f1d83f2"} Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.297435 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.335915 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-kg84k"] Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.344064 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-kg84k"] Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.370964 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d8256aec-df1d-4261-8810-28b35906db97-ring-data-devices\") pod \"d8256aec-df1d-4261-8810-28b35906db97\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.371067 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d8256aec-df1d-4261-8810-28b35906db97-dispersionconf\") pod \"d8256aec-df1d-4261-8810-28b35906db97\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.371197 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d8256aec-df1d-4261-8810-28b35906db97-scripts\") pod \"d8256aec-df1d-4261-8810-28b35906db97\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.371225 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d8256aec-df1d-4261-8810-28b35906db97-swiftconf\") pod \"d8256aec-df1d-4261-8810-28b35906db97\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.371278 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d8256aec-df1d-4261-8810-28b35906db97-etc-swift\") pod \"d8256aec-df1d-4261-8810-28b35906db97\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.371306 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92rfl\" (UniqueName: \"kubernetes.io/projected/d8256aec-df1d-4261-8810-28b35906db97-kube-api-access-92rfl\") pod \"d8256aec-df1d-4261-8810-28b35906db97\" (UID: \"d8256aec-df1d-4261-8810-28b35906db97\") " Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.371526 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8256aec-df1d-4261-8810-28b35906db97-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "d8256aec-df1d-4261-8810-28b35906db97" (UID: "d8256aec-df1d-4261-8810-28b35906db97"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.371742 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d8256aec-df1d-4261-8810-28b35906db97-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.372197 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8256aec-df1d-4261-8810-28b35906db97-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "d8256aec-df1d-4261-8810-28b35906db97" (UID: "d8256aec-df1d-4261-8810-28b35906db97"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.384169 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8256aec-df1d-4261-8810-28b35906db97-kube-api-access-92rfl" (OuterVolumeSpecName: "kube-api-access-92rfl") pod "d8256aec-df1d-4261-8810-28b35906db97" (UID: "d8256aec-df1d-4261-8810-28b35906db97"). InnerVolumeSpecName "kube-api-access-92rfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.391144 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8256aec-df1d-4261-8810-28b35906db97-scripts" (OuterVolumeSpecName: "scripts") pod "d8256aec-df1d-4261-8810-28b35906db97" (UID: "d8256aec-df1d-4261-8810-28b35906db97"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.392736 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8256aec-df1d-4261-8810-28b35906db97-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "d8256aec-df1d-4261-8810-28b35906db97" (UID: "d8256aec-df1d-4261-8810-28b35906db97"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.394032 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8256aec-df1d-4261-8810-28b35906db97-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "d8256aec-df1d-4261-8810-28b35906db97" (UID: "d8256aec-df1d-4261-8810-28b35906db97"). InnerVolumeSpecName "dispersionconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.473485 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d8256aec-df1d-4261-8810-28b35906db97-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.473515 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d8256aec-df1d-4261-8810-28b35906db97-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.473549 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d8256aec-df1d-4261-8810-28b35906db97-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.473560 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92rfl\" (UniqueName: \"kubernetes.io/projected/d8256aec-df1d-4261-8810-28b35906db97-kube-api-access-92rfl\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:37 crc kubenswrapper[4721]: I1202 12:13:37.473572 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d8256aec-df1d-4261-8810-28b35906db97-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.041663 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="11f00f2ed4e77f96991a26bdbca3d8efc1d7041ca1d49c0a0488f0ee052f2d0f" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.041739 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-kg84k" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.533895 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c"] Dec 02 12:13:38 crc kubenswrapper[4721]: E1202 12:13:38.534996 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8256aec-df1d-4261-8810-28b35906db97" containerName="swift-ring-rebalance" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.535065 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8256aec-df1d-4261-8810-28b35906db97" containerName="swift-ring-rebalance" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.535392 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8256aec-df1d-4261-8810-28b35906db97" containerName="swift-ring-rebalance" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.535969 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.540016 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.540063 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.544032 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c"] Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.590010 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-ring-data-devices\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.590062 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-scripts\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.590126 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-etc-swift\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.590147 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6h6m\" (UniqueName: \"kubernetes.io/projected/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-kube-api-access-n6h6m\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.590217 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-swiftconf\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.590258 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-dispersionconf\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.691668 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-etc-swift\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc 
kubenswrapper[4721]: I1202 12:13:38.691711 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6h6m\" (UniqueName: \"kubernetes.io/projected/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-kube-api-access-n6h6m\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.691757 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-swiftconf\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.691787 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-dispersionconf\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.691831 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-ring-data-devices\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.691854 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-scripts\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.692571 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-etc-swift\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.692827 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-scripts\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.693534 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-ring-data-devices\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.695501 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-swiftconf\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" 
Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.697165 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-dispersionconf\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.708898 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6h6m\" (UniqueName: \"kubernetes.io/projected/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-kube-api-access-n6h6m\") pod \"swift-ring-rebalance-debug-hmt5c\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.848740 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8256aec-df1d-4261-8810-28b35906db97" path="/var/lib/kubelet/pods/d8256aec-df1d-4261-8810-28b35906db97/volumes" Dec 02 12:13:38 crc kubenswrapper[4721]: I1202 12:13:38.854302 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:39 crc kubenswrapper[4721]: I1202 12:13:39.323996 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c"] Dec 02 12:13:40 crc kubenswrapper[4721]: I1202 12:13:40.057288 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" event={"ID":"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3","Type":"ContainerStarted","Data":"8aeff8eb884c352c036e2624516bebae090a848c1e902da00c3a726c9655f9bf"} Dec 02 12:13:40 crc kubenswrapper[4721]: I1202 12:13:40.057626 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" event={"ID":"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3","Type":"ContainerStarted","Data":"1e60c62097d3458895fe1fa27659a8fcc700e376747da662751937413ae886ab"} Dec 02 12:13:41 crc kubenswrapper[4721]: I1202 12:13:41.070535 4721 generic.go:334] "Generic (PLEG): container finished" podID="5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3" containerID="8aeff8eb884c352c036e2624516bebae090a848c1e902da00c3a726c9655f9bf" exitCode=0 Dec 02 12:13:41 crc kubenswrapper[4721]: I1202 12:13:41.070615 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" event={"ID":"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3","Type":"ContainerDied","Data":"8aeff8eb884c352c036e2624516bebae090a848c1e902da00c3a726c9655f9bf"} Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.437707 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.452932 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-ring-data-devices\") pod \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.452975 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-scripts\") pod \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.453114 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6h6m\" (UniqueName: \"kubernetes.io/projected/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-kube-api-access-n6h6m\") pod \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.453193 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-etc-swift\") pod \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.453219 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-swiftconf\") pod \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.453284 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-dispersionconf\") pod \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\" (UID: \"5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3\") " Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.454264 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3" (UID: "5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.454529 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3" (UID: "5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.458380 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-kube-api-access-n6h6m" (OuterVolumeSpecName: "kube-api-access-n6h6m") pod "5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3" (UID: "5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3"). InnerVolumeSpecName "kube-api-access-n6h6m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.479035 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-scripts" (OuterVolumeSpecName: "scripts") pod "5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3" (UID: "5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.481778 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3" (UID: "5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.492787 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3" (UID: "5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.495770 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c"] Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.503977 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c"] Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.555481 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.555511 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.555521 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6h6m\" (UniqueName: \"kubernetes.io/projected/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-kube-api-access-n6h6m\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.555544 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.555556 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.555563 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:42 crc kubenswrapper[4721]: I1202 12:13:42.847405 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3" path="/var/lib/kubelet/pods/5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3/volumes" Dec 02 12:13:43 crc 
kubenswrapper[4721]: I1202 12:13:43.089377 4721 scope.go:117] "RemoveContainer" containerID="8aeff8eb884c352c036e2624516bebae090a848c1e902da00c3a726c9655f9bf" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.089511 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-hmt5c" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.684608 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt"] Dec 02 12:13:43 crc kubenswrapper[4721]: E1202 12:13:43.684951 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3" containerName="swift-ring-rebalance" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.684964 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3" containerName="swift-ring-rebalance" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.685100 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="5baf20a6-e9f5-4270-a9e7-e3f0fef8c7b3" containerName="swift-ring-rebalance" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.685555 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.690509 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.690535 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.696850 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt"] Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.771861 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b6f84eb7-88f0-4dfc-a147-6a7412a74696-etc-swift\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.771896 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b6f84eb7-88f0-4dfc-a147-6a7412a74696-ring-data-devices\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.771946 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6f84eb7-88f0-4dfc-a147-6a7412a74696-scripts\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.771970 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b6f84eb7-88f0-4dfc-a147-6a7412a74696-dispersionconf\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 
12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.772209 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b6f84eb7-88f0-4dfc-a147-6a7412a74696-swiftconf\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.772411 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfnf2\" (UniqueName: \"kubernetes.io/projected/b6f84eb7-88f0-4dfc-a147-6a7412a74696-kube-api-access-tfnf2\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.873897 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b6f84eb7-88f0-4dfc-a147-6a7412a74696-etc-swift\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.873937 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b6f84eb7-88f0-4dfc-a147-6a7412a74696-ring-data-devices\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.873984 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6f84eb7-88f0-4dfc-a147-6a7412a74696-scripts\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.874009 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b6f84eb7-88f0-4dfc-a147-6a7412a74696-dispersionconf\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.874092 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b6f84eb7-88f0-4dfc-a147-6a7412a74696-swiftconf\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.874134 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfnf2\" (UniqueName: \"kubernetes.io/projected/b6f84eb7-88f0-4dfc-a147-6a7412a74696-kube-api-access-tfnf2\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.874480 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b6f84eb7-88f0-4dfc-a147-6a7412a74696-etc-swift\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: 
\"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.874867 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b6f84eb7-88f0-4dfc-a147-6a7412a74696-ring-data-devices\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.875705 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6f84eb7-88f0-4dfc-a147-6a7412a74696-scripts\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.881095 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b6f84eb7-88f0-4dfc-a147-6a7412a74696-swiftconf\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.886948 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b6f84eb7-88f0-4dfc-a147-6a7412a74696-dispersionconf\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:43 crc kubenswrapper[4721]: I1202 12:13:43.896271 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfnf2\" (UniqueName: \"kubernetes.io/projected/b6f84eb7-88f0-4dfc-a147-6a7412a74696-kube-api-access-tfnf2\") pod \"swift-ring-rebalance-debug-7vqdt\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:44 crc kubenswrapper[4721]: I1202 12:13:44.004787 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:44 crc kubenswrapper[4721]: I1202 12:13:44.460181 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt"] Dec 02 12:13:44 crc kubenswrapper[4721]: W1202 12:13:44.462235 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6f84eb7_88f0_4dfc_a147_6a7412a74696.slice/crio-0d2f7cfce772ebfd978b8e5be9f733578d1903676ab949ce0e1ca21f87489a9d WatchSource:0}: Error finding container 0d2f7cfce772ebfd978b8e5be9f733578d1903676ab949ce0e1ca21f87489a9d: Status 404 returned error can't find the container with id 0d2f7cfce772ebfd978b8e5be9f733578d1903676ab949ce0e1ca21f87489a9d Dec 02 12:13:45 crc kubenswrapper[4721]: I1202 12:13:45.111546 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" event={"ID":"b6f84eb7-88f0-4dfc-a147-6a7412a74696","Type":"ContainerStarted","Data":"d1d795e05b76189fa88a0000068bea65d1418dc6a05ab971bb5f8f433e613460"} Dec 02 12:13:45 crc kubenswrapper[4721]: I1202 12:13:45.111902 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" event={"ID":"b6f84eb7-88f0-4dfc-a147-6a7412a74696","Type":"ContainerStarted","Data":"0d2f7cfce772ebfd978b8e5be9f733578d1903676ab949ce0e1ca21f87489a9d"} Dec 02 12:13:45 crc kubenswrapper[4721]: I1202 12:13:45.137515 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" podStartSLOduration=2.1374966 podStartE2EDuration="2.1374966s" podCreationTimestamp="2025-12-02 12:13:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:13:45.133115355 +0000 UTC m=+1354.871665724" watchObservedRunningTime="2025-12-02 12:13:45.1374966 +0000 UTC m=+1354.876046949" Dec 02 12:13:46 crc kubenswrapper[4721]: I1202 12:13:46.119843 4721 generic.go:334] "Generic (PLEG): container finished" podID="b6f84eb7-88f0-4dfc-a147-6a7412a74696" containerID="d1d795e05b76189fa88a0000068bea65d1418dc6a05ab971bb5f8f433e613460" exitCode=0 Dec 02 12:13:46 crc kubenswrapper[4721]: I1202 12:13:46.119954 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" event={"ID":"b6f84eb7-88f0-4dfc-a147-6a7412a74696","Type":"ContainerDied","Data":"d1d795e05b76189fa88a0000068bea65d1418dc6a05ab971bb5f8f433e613460"} Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.407204 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt" Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.426115 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b6f84eb7-88f0-4dfc-a147-6a7412a74696-swiftconf\") pod \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.426189 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfnf2\" (UniqueName: \"kubernetes.io/projected/b6f84eb7-88f0-4dfc-a147-6a7412a74696-kube-api-access-tfnf2\") pod \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.426217 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b6f84eb7-88f0-4dfc-a147-6a7412a74696-ring-data-devices\") pod \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.426241 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b6f84eb7-88f0-4dfc-a147-6a7412a74696-etc-swift\") pod \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.426274 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6f84eb7-88f0-4dfc-a147-6a7412a74696-scripts\") pod \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.426328 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b6f84eb7-88f0-4dfc-a147-6a7412a74696-dispersionconf\") pod \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\" (UID: \"b6f84eb7-88f0-4dfc-a147-6a7412a74696\") " Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.431510 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6f84eb7-88f0-4dfc-a147-6a7412a74696-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "b6f84eb7-88f0-4dfc-a147-6a7412a74696" (UID: "b6f84eb7-88f0-4dfc-a147-6a7412a74696"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.432301 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6f84eb7-88f0-4dfc-a147-6a7412a74696-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "b6f84eb7-88f0-4dfc-a147-6a7412a74696" (UID: "b6f84eb7-88f0-4dfc-a147-6a7412a74696"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.438187 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6f84eb7-88f0-4dfc-a147-6a7412a74696-kube-api-access-tfnf2" (OuterVolumeSpecName: "kube-api-access-tfnf2") pod "b6f84eb7-88f0-4dfc-a147-6a7412a74696" (UID: "b6f84eb7-88f0-4dfc-a147-6a7412a74696"). InnerVolumeSpecName "kube-api-access-tfnf2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.455427 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6f84eb7-88f0-4dfc-a147-6a7412a74696-scripts" (OuterVolumeSpecName: "scripts") pod "b6f84eb7-88f0-4dfc-a147-6a7412a74696" (UID: "b6f84eb7-88f0-4dfc-a147-6a7412a74696"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.459013 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt"] Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.459171 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6f84eb7-88f0-4dfc-a147-6a7412a74696-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "b6f84eb7-88f0-4dfc-a147-6a7412a74696" (UID: "b6f84eb7-88f0-4dfc-a147-6a7412a74696"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.470520 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-7vqdt"] Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.477945 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6f84eb7-88f0-4dfc-a147-6a7412a74696-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "b6f84eb7-88f0-4dfc-a147-6a7412a74696" (UID: "b6f84eb7-88f0-4dfc-a147-6a7412a74696"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.529080 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b6f84eb7-88f0-4dfc-a147-6a7412a74696-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.529110 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b6f84eb7-88f0-4dfc-a147-6a7412a74696-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.529120 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfnf2\" (UniqueName: \"kubernetes.io/projected/b6f84eb7-88f0-4dfc-a147-6a7412a74696-kube-api-access-tfnf2\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.529130 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b6f84eb7-88f0-4dfc-a147-6a7412a74696-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.529138 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b6f84eb7-88f0-4dfc-a147-6a7412a74696-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:47 crc kubenswrapper[4721]: I1202 12:13:47.529147 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6f84eb7-88f0-4dfc-a147-6a7412a74696-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.136519 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d2f7cfce772ebfd978b8e5be9f733578d1903676ab949ce0e1ca21f87489a9d" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.136611 
Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.604123 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt"]
Dec 02 12:13:48 crc kubenswrapper[4721]: E1202 12:13:48.604532 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6f84eb7-88f0-4dfc-a147-6a7412a74696" containerName="swift-ring-rebalance"
Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.604547 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6f84eb7-88f0-4dfc-a147-6a7412a74696" containerName="swift-ring-rebalance"
Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.604731 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6f84eb7-88f0-4dfc-a147-6a7412a74696" containerName="swift-ring-rebalance"
Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.605304 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt"
Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.608019 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data"
Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.608218 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts"
Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.623971 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt"]
Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.743780 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/36d577a8-5ff0-42eb-b739-58b687bd5175-etc-swift\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt"
Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.743816 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/36d577a8-5ff0-42eb-b739-58b687bd5175-dispersionconf\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt"
Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.743843 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/36d577a8-5ff0-42eb-b739-58b687bd5175-ring-data-devices\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt"
Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.743913 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/36d577a8-5ff0-42eb-b739-58b687bd5175-swiftconf\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt"
Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.743944 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/36d577a8-5ff0-42eb-b739-58b687bd5175-scripts\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt"
\"kubernetes.io/configmap/36d577a8-5ff0-42eb-b739-58b687bd5175-scripts\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.743966 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7znxn\" (UniqueName: \"kubernetes.io/projected/36d577a8-5ff0-42eb-b739-58b687bd5175-kube-api-access-7znxn\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.845302 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/36d577a8-5ff0-42eb-b739-58b687bd5175-swiftconf\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.845453 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/36d577a8-5ff0-42eb-b739-58b687bd5175-scripts\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.845486 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7znxn\" (UniqueName: \"kubernetes.io/projected/36d577a8-5ff0-42eb-b739-58b687bd5175-kube-api-access-7znxn\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.845566 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/36d577a8-5ff0-42eb-b739-58b687bd5175-etc-swift\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.845589 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/36d577a8-5ff0-42eb-b739-58b687bd5175-dispersionconf\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.845622 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/36d577a8-5ff0-42eb-b739-58b687bd5175-ring-data-devices\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.846510 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/36d577a8-5ff0-42eb-b739-58b687bd5175-scripts\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.846532 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/36d577a8-5ff0-42eb-b739-58b687bd5175-ring-data-devices\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.846734 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/36d577a8-5ff0-42eb-b739-58b687bd5175-etc-swift\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.849704 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/36d577a8-5ff0-42eb-b739-58b687bd5175-swiftconf\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.851702 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/36d577a8-5ff0-42eb-b739-58b687bd5175-dispersionconf\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.854125 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6f84eb7-88f0-4dfc-a147-6a7412a74696" path="/var/lib/kubelet/pods/b6f84eb7-88f0-4dfc-a147-6a7412a74696/volumes" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.863386 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7znxn\" (UniqueName: \"kubernetes.io/projected/36d577a8-5ff0-42eb-b739-58b687bd5175-kube-api-access-7znxn\") pod \"swift-ring-rebalance-debug-w8bdt\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") " pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" Dec 02 12:13:48 crc kubenswrapper[4721]: I1202 12:13:48.921994 4721 util.go:30] "No sandbox for pod can be found. 
Dec 02 12:13:49 crc kubenswrapper[4721]: I1202 12:13:49.346503 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt"]
Dec 02 12:13:50 crc kubenswrapper[4721]: I1202 12:13:50.152464 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" event={"ID":"36d577a8-5ff0-42eb-b739-58b687bd5175","Type":"ContainerStarted","Data":"22cc57845216570291c272cccdc9e3b29300886e1da0a82d1ec5a63b31e024b6"}
Dec 02 12:13:50 crc kubenswrapper[4721]: I1202 12:13:50.152852 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" event={"ID":"36d577a8-5ff0-42eb-b739-58b687bd5175","Type":"ContainerStarted","Data":"10a8782a263e60be73e7ed395bf553a296117490750c5d8a6f896d63fb4a00e4"}
Dec 02 12:13:50 crc kubenswrapper[4721]: I1202 12:13:50.176428 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" podStartSLOduration=2.176408054 podStartE2EDuration="2.176408054s" podCreationTimestamp="2025-12-02 12:13:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:13:50.171985707 +0000 UTC m=+1359.910536056" watchObservedRunningTime="2025-12-02 12:13:50.176408054 +0000 UTC m=+1359.914958403"
Dec 02 12:13:51 crc kubenswrapper[4721]: I1202 12:13:51.159468 4721 generic.go:334] "Generic (PLEG): container finished" podID="36d577a8-5ff0-42eb-b739-58b687bd5175" containerID="22cc57845216570291c272cccdc9e3b29300886e1da0a82d1ec5a63b31e024b6" exitCode=0
Dec 02 12:13:51 crc kubenswrapper[4721]: I1202 12:13:51.159510 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt" event={"ID":"36d577a8-5ff0-42eb-b739-58b687bd5175","Type":"ContainerDied","Data":"22cc57845216570291c272cccdc9e3b29300886e1da0a82d1ec5a63b31e024b6"}
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.502879 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt"
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.550437 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt"]
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.558208 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt"]
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.597974 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/36d577a8-5ff0-42eb-b739-58b687bd5175-ring-data-devices\") pod \"36d577a8-5ff0-42eb-b739-58b687bd5175\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") "
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.598074 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/36d577a8-5ff0-42eb-b739-58b687bd5175-etc-swift\") pod \"36d577a8-5ff0-42eb-b739-58b687bd5175\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") "
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.598115 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/36d577a8-5ff0-42eb-b739-58b687bd5175-swiftconf\") pod \"36d577a8-5ff0-42eb-b739-58b687bd5175\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") "
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.598176 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/36d577a8-5ff0-42eb-b739-58b687bd5175-dispersionconf\") pod \"36d577a8-5ff0-42eb-b739-58b687bd5175\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") "
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.598203 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/36d577a8-5ff0-42eb-b739-58b687bd5175-scripts\") pod \"36d577a8-5ff0-42eb-b739-58b687bd5175\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") "
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.598303 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7znxn\" (UniqueName: \"kubernetes.io/projected/36d577a8-5ff0-42eb-b739-58b687bd5175-kube-api-access-7znxn\") pod \"36d577a8-5ff0-42eb-b739-58b687bd5175\" (UID: \"36d577a8-5ff0-42eb-b739-58b687bd5175\") "
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.598775 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36d577a8-5ff0-42eb-b739-58b687bd5175-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "36d577a8-5ff0-42eb-b739-58b687bd5175" (UID: "36d577a8-5ff0-42eb-b739-58b687bd5175"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.598953 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36d577a8-5ff0-42eb-b739-58b687bd5175-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "36d577a8-5ff0-42eb-b739-58b687bd5175" (UID: "36d577a8-5ff0-42eb-b739-58b687bd5175"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.607064 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36d577a8-5ff0-42eb-b739-58b687bd5175-kube-api-access-7znxn" (OuterVolumeSpecName: "kube-api-access-7znxn") pod "36d577a8-5ff0-42eb-b739-58b687bd5175" (UID: "36d577a8-5ff0-42eb-b739-58b687bd5175"). InnerVolumeSpecName "kube-api-access-7znxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.629594 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36d577a8-5ff0-42eb-b739-58b687bd5175-scripts" (OuterVolumeSpecName: "scripts") pod "36d577a8-5ff0-42eb-b739-58b687bd5175" (UID: "36d577a8-5ff0-42eb-b739-58b687bd5175"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.650398 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-t8k78"] Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.654192 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.654808 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-server" containerID="cri-o://1da308cd6e4b57955810b3f16b8850390d22406c5c67f5271af2bfcb6e30b8d5" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.655205 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="swift-recon-cron" containerID="cri-o://fa37ab877067b4bd0837ef51174cd21450a84a3f3c933b37297b14c21c679786" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.655279 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="rsync" containerID="cri-o://d0135e82a83e7dd432d606cd974ccad81f99a7740082c819d27aad70f20df54c" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.655329 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-expirer" containerID="cri-o://a42959157e2fd85ce825bcf1a4e95457615a57ce555725522b47bd8f26bf7ac9" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.655396 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-updater" containerID="cri-o://a325af09c6f45c8e41a6dbcb452cd9644b277e3ad497298b0240f6c9b7e522ef" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.655439 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-auditor" containerID="cri-o://ea3dc07b828742ed47d9ee04d67d8b69338eda5d1113da2c4460899c2c5acb37" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.655480 4721 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="swift-kuttl-tests/swift-storage-0" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-replicator" containerID="cri-o://69ab3857ba197ddd18cda3cf08e05c79f4925f822cd01d76d13743798385b8fb" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.655522 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-server" containerID="cri-o://97ca0d88fc5a289cee8c0ab0310c5c97915d5ac70a170da304db86445523758a" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.655566 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-updater" containerID="cri-o://5a42824206ed29faa1e2a17dae669ab6e5d3adea29dd3b5a786383a72fc3adf1" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.655614 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-auditor" containerID="cri-o://38828a33ce455be19b0e43568db4ffd83b44c1da254ef1943cc620bb9f4f65d2" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.655662 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-replicator" containerID="cri-o://cb4782790aa9a69b2a20327fa4e51e5f4a5f9461eb69c419341fc62846ef8318" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.655709 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-server" containerID="cri-o://355bdf529f7f19ac1a7928e73f70afc680d14b7084acc2ed4a8664677b5a38fc" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.655761 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-reaper" containerID="cri-o://5ddea8519471235cd203737898d4a84a22947a858c4161d0cdbc538395b90d59" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.655803 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-auditor" containerID="cri-o://18523f1db324b880b4580224bf2ad72c03020e5940c91f1f4e0d34781b24bbd0" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.655845 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-replicator" containerID="cri-o://b17edaa3291f4fcc1070e11c3ba8b7ea2370f6138a9e013896a7afcccc514df5" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.658863 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d577a8-5ff0-42eb-b739-58b687bd5175-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "36d577a8-5ff0-42eb-b739-58b687bd5175" (UID: "36d577a8-5ff0-42eb-b739-58b687bd5175"). InnerVolumeSpecName "dispersionconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.672311 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d577a8-5ff0-42eb-b739-58b687bd5175-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "36d577a8-5ff0-42eb-b739-58b687bd5175" (UID: "36d577a8-5ff0-42eb-b739-58b687bd5175"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.677504 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.677936 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-server" containerID="cri-o://9a14a12fa0925fa7c29432aa1ea91ad4238ba9bbcd8d6c303a4a6e23d98d992c" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.678296 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="swift-recon-cron" containerID="cri-o://9f93046a00ab18f63db21c1a5dba48ed7daff1a6b7044e4d1da47b18536102d3" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.678366 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="rsync" containerID="cri-o://17ecf94ee8139633e4482645dfc512d3c308f9254511ea8aab340da5449735b3" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.678406 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-expirer" containerID="cri-o://27cc7a59bb52c7bd6d9de63a62663eb93857c7142b7a434c5f3e8c7bb4b8aa05" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.678436 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-updater" containerID="cri-o://b13272c7bfd0fb6396c6e0277ad0fe07e2d432578e94c8ecd666fcb45ea827e9" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.678463 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-auditor" containerID="cri-o://fa20ac24780157fa9b52a532ecfc6a01b8357c8e4777d512d203c65148a94f96" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.678498 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-replicator" containerID="cri-o://66c39b5fbb7a7d9d64886db6217c70fa81f42865cdaa296dc478fcca96a41978" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.678526 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-server" containerID="cri-o://edea0e2ed18478f285ecaae9e57b77f1fcfb4dfd9b94f3ff54c22bc24b3b5df6" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.678558 4721 
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.678585 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="container-auditor" containerID="cri-o://80210a92576ea67e922d78ea7ac8c50c584e2c56cd8467eb85b5b29838f31334" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.678651 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="container-replicator" containerID="cri-o://a868d42e6fa1c098fc9d1583f04dca38e0be60a59a7c0762d581b1ca1d12bbda" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.678683 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="container-server" containerID="cri-o://3c6328f8519642aa83a494e07195a10428397045d3b8562babcc3bd7fa755d16" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.678726 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-reaper" containerID="cri-o://afc24a08241150352ee3448002c09defeace534d3cdb24c6c11e113df2658865" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.678753 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-auditor" containerID="cri-o://c65cb5a856a1c5496889424f188ddde01dad3f8c58c03eaf04981bec7fcfa362" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.678783 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-2" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-replicator" containerID="cri-o://f33070308e90b30d9cdd71f45cf5283d4f08d7eb1d267b4d5c3f4fdb3b88e7c9" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.697850 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-1"]
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.698262 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-server" containerID="cri-o://bdbbf47a97448d8e3f16f5aeb238ed4d4f54e04bba167e8432c8d3dffe4602ca" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.698579 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="swift-recon-cron" containerID="cri-o://82622747e6978b5288a6f64a0f89c622a9c67f282e2129f1c52ace5096950be4" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.698617 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="rsync" containerID="cri-o://1c7c9162b575caa76db5e414cb7b4015f067355fff84caa7f8ac197367c94e01" gracePeriod=30
containerName="rsync" containerID="cri-o://1c7c9162b575caa76db5e414cb7b4015f067355fff84caa7f8ac197367c94e01" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.698646 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-expirer" containerID="cri-o://a09e88795ce2afc38328685e05ceb0f9eb725d61c3b98331e5658b0322ce0e3a" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.698677 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-updater" containerID="cri-o://ca8d8914238f7696f9bae1ac633efc7bd5b63e29dac383a6626db4cf4f83dcb2" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.699439 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7znxn\" (UniqueName: \"kubernetes.io/projected/36d577a8-5ff0-42eb-b739-58b687bd5175-kube-api-access-7znxn\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.699454 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/36d577a8-5ff0-42eb-b739-58b687bd5175-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.699462 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/36d577a8-5ff0-42eb-b739-58b687bd5175-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.699470 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/36d577a8-5ff0-42eb-b739-58b687bd5175-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.699478 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/36d577a8-5ff0-42eb-b739-58b687bd5175-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.699486 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/36d577a8-5ff0-42eb-b739-58b687bd5175-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.699501 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-auditor" containerID="cri-o://479d0dec2aaa6fd4ccce84dbbf0d0e58053b85167261d3f59757a121591636bc" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.699536 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-replicator" containerID="cri-o://39c5390ffcbe3c0be784db7ef05672025fcf3d952d626304e297e1590718a897" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.699564 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-server" containerID="cri-o://37cc4ccb7d8d4a2ff89c5372f3428d8517c9cf30a414c165ecbb4c6f1b5fa94c" gracePeriod=30 Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.699631 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="container-auditor" containerID="cri-o://ddcdd1ec5d88f5c19c5c33956dd876023076ce8196b01b8b60b11011d978cc74" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.699664 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="container-replicator" containerID="cri-o://6e9c97886d2281d2db8385c13740c5db13108dbad3318be268566bdf4ef38e60" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.699691 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="container-server" containerID="cri-o://056786e0cc65aa5944f5f7bf4545d4c3cd9ae097250472dddf1c826cb1ac9231" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.699726 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-reaper" containerID="cri-o://3012afa35630159ed0600ebceef3df1eaf4dfb0728ea48837272b66f5dce0c7b" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.699755 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-auditor" containerID="cri-o://ff082e662a26aeeea6732d9ad265b9acb9da661a4252a9d3dd52302629db893e" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.699784 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-1" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-replicator" containerID="cri-o://3c4eacf0568ae73dd6413ce1561622aa8998c530cf8df7cce6ebe9c2c46d80b0" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.719796 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-t8k78"]
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.734743 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"]
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.734964 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" podUID="06581551-03b2-4c6a-add5-cfa6216a461c" containerName="proxy-httpd" containerID="cri-o://cdc013152076d46bfb55d74405852fea85bf4ac53ddd98ed5ce5742dbf9483b1" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.735324 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" podUID="06581551-03b2-4c6a-add5-cfa6216a461c" containerName="proxy-server" containerID="cri-o://dd36af16b3f8948d39118eac04affce692461b8fccf54766ba111f311f82cc07" gracePeriod=30
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.866495 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36d577a8-5ff0-42eb-b739-58b687bd5175" path="/var/lib/kubelet/pods/36d577a8-5ff0-42eb-b739-58b687bd5175/volumes"
Dec 02 12:13:52 crc kubenswrapper[4721]: I1202 12:13:52.867495 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83333d8f-d5df-4ce3-a0eb-107d7ed9a623" path="/var/lib/kubelet/pods/83333d8f-d5df-4ce3-a0eb-107d7ed9a623/volumes"
Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.174331 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-debug-w8bdt"
Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.174423 4721 scope.go:117] "RemoveContainer" containerID="22cc57845216570291c272cccdc9e3b29300886e1da0a82d1ec5a63b31e024b6"
Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184304 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="1c7c9162b575caa76db5e414cb7b4015f067355fff84caa7f8ac197367c94e01" exitCode=0
Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184335 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="a09e88795ce2afc38328685e05ceb0f9eb725d61c3b98331e5658b0322ce0e3a" exitCode=0
Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184358 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="ca8d8914238f7696f9bae1ac633efc7bd5b63e29dac383a6626db4cf4f83dcb2" exitCode=0
Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184364 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="479d0dec2aaa6fd4ccce84dbbf0d0e58053b85167261d3f59757a121591636bc" exitCode=0
Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184371 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="39c5390ffcbe3c0be784db7ef05672025fcf3d952d626304e297e1590718a897" exitCode=0
Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184377 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="37cc4ccb7d8d4a2ff89c5372f3428d8517c9cf30a414c165ecbb4c6f1b5fa94c" exitCode=0
Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184383 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="8363b9e2ac15e6050113b7320c011910acd54ded41353912314d366cc137db51" exitCode=0
Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184389 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="ddcdd1ec5d88f5c19c5c33956dd876023076ce8196b01b8b60b11011d978cc74" exitCode=0
Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184396 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="6e9c97886d2281d2db8385c13740c5db13108dbad3318be268566bdf4ef38e60" exitCode=0
Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184401 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="056786e0cc65aa5944f5f7bf4545d4c3cd9ae097250472dddf1c826cb1ac9231" exitCode=0
Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184407 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="3012afa35630159ed0600ebceef3df1eaf4dfb0728ea48837272b66f5dce0c7b" exitCode=0
podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="3012afa35630159ed0600ebceef3df1eaf4dfb0728ea48837272b66f5dce0c7b" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184413 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="ff082e662a26aeeea6732d9ad265b9acb9da661a4252a9d3dd52302629db893e" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184420 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="3c4eacf0568ae73dd6413ce1561622aa8998c530cf8df7cce6ebe9c2c46d80b0" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184460 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"1c7c9162b575caa76db5e414cb7b4015f067355fff84caa7f8ac197367c94e01"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184486 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"a09e88795ce2afc38328685e05ceb0f9eb725d61c3b98331e5658b0322ce0e3a"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184496 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"ca8d8914238f7696f9bae1ac633efc7bd5b63e29dac383a6626db4cf4f83dcb2"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184504 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"479d0dec2aaa6fd4ccce84dbbf0d0e58053b85167261d3f59757a121591636bc"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184513 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"39c5390ffcbe3c0be784db7ef05672025fcf3d952d626304e297e1590718a897"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184521 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"37cc4ccb7d8d4a2ff89c5372f3428d8517c9cf30a414c165ecbb4c6f1b5fa94c"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184529 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"8363b9e2ac15e6050113b7320c011910acd54ded41353912314d366cc137db51"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184537 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"ddcdd1ec5d88f5c19c5c33956dd876023076ce8196b01b8b60b11011d978cc74"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184546 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"6e9c97886d2281d2db8385c13740c5db13108dbad3318be268566bdf4ef38e60"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184554 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"056786e0cc65aa5944f5f7bf4545d4c3cd9ae097250472dddf1c826cb1ac9231"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184562 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"3012afa35630159ed0600ebceef3df1eaf4dfb0728ea48837272b66f5dce0c7b"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184571 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"ff082e662a26aeeea6732d9ad265b9acb9da661a4252a9d3dd52302629db893e"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.184579 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"3c4eacf0568ae73dd6413ce1561622aa8998c530cf8df7cce6ebe9c2c46d80b0"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194081 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerID="27cc7a59bb52c7bd6d9de63a62663eb93857c7142b7a434c5f3e8c7bb4b8aa05" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194115 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerID="b13272c7bfd0fb6396c6e0277ad0fe07e2d432578e94c8ecd666fcb45ea827e9" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194127 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerID="fa20ac24780157fa9b52a532ecfc6a01b8357c8e4777d512d203c65148a94f96" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194138 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerID="66c39b5fbb7a7d9d64886db6217c70fa81f42865cdaa296dc478fcca96a41978" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194146 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerID="19bd7e7242eb220985a6143c5b395a3162c141462dd4414ff8c5f615f49002b1" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194154 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerID="80210a92576ea67e922d78ea7ac8c50c584e2c56cd8467eb85b5b29838f31334" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194163 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerID="a868d42e6fa1c098fc9d1583f04dca38e0be60a59a7c0762d581b1ca1d12bbda" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194172 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerID="3c6328f8519642aa83a494e07195a10428397045d3b8562babcc3bd7fa755d16" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194180 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerID="afc24a08241150352ee3448002c09defeace534d3cdb24c6c11e113df2658865" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194188 4721 generic.go:334] "Generic (PLEG): container finished" 
podID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerID="c65cb5a856a1c5496889424f188ddde01dad3f8c58c03eaf04981bec7fcfa362" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194198 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerID="f33070308e90b30d9cdd71f45cf5283d4f08d7eb1d267b4d5c3f4fdb3b88e7c9" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194208 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerID="9a14a12fa0925fa7c29432aa1ea91ad4238ba9bbcd8d6c303a4a6e23d98d992c" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194142 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"27cc7a59bb52c7bd6d9de63a62663eb93857c7142b7a434c5f3e8c7bb4b8aa05"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194291 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"b13272c7bfd0fb6396c6e0277ad0fe07e2d432578e94c8ecd666fcb45ea827e9"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194310 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"fa20ac24780157fa9b52a532ecfc6a01b8357c8e4777d512d203c65148a94f96"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194326 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"66c39b5fbb7a7d9d64886db6217c70fa81f42865cdaa296dc478fcca96a41978"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194409 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"19bd7e7242eb220985a6143c5b395a3162c141462dd4414ff8c5f615f49002b1"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194427 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"80210a92576ea67e922d78ea7ac8c50c584e2c56cd8467eb85b5b29838f31334"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194440 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"a868d42e6fa1c098fc9d1583f04dca38e0be60a59a7c0762d581b1ca1d12bbda"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194452 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"3c6328f8519642aa83a494e07195a10428397045d3b8562babcc3bd7fa755d16"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194463 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"afc24a08241150352ee3448002c09defeace534d3cdb24c6c11e113df2658865"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194475 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"c65cb5a856a1c5496889424f188ddde01dad3f8c58c03eaf04981bec7fcfa362"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194486 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"f33070308e90b30d9cdd71f45cf5283d4f08d7eb1d267b4d5c3f4fdb3b88e7c9"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.194497 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"9a14a12fa0925fa7c29432aa1ea91ad4238ba9bbcd8d6c303a4a6e23d98d992c"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203288 4721 generic.go:334] "Generic (PLEG): container finished" podID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerID="d0135e82a83e7dd432d606cd974ccad81f99a7740082c819d27aad70f20df54c" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203315 4721 generic.go:334] "Generic (PLEG): container finished" podID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerID="a42959157e2fd85ce825bcf1a4e95457615a57ce555725522b47bd8f26bf7ac9" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203322 4721 generic.go:334] "Generic (PLEG): container finished" podID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerID="a325af09c6f45c8e41a6dbcb452cd9644b277e3ad497298b0240f6c9b7e522ef" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203328 4721 generic.go:334] "Generic (PLEG): container finished" podID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerID="ea3dc07b828742ed47d9ee04d67d8b69338eda5d1113da2c4460899c2c5acb37" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203334 4721 generic.go:334] "Generic (PLEG): container finished" podID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerID="69ab3857ba197ddd18cda3cf08e05c79f4925f822cd01d76d13743798385b8fb" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203363 4721 generic.go:334] "Generic (PLEG): container finished" podID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerID="97ca0d88fc5a289cee8c0ab0310c5c97915d5ac70a170da304db86445523758a" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203378 4721 generic.go:334] "Generic (PLEG): container finished" podID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerID="5a42824206ed29faa1e2a17dae669ab6e5d3adea29dd3b5a786383a72fc3adf1" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203386 4721 generic.go:334] "Generic (PLEG): container finished" podID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerID="38828a33ce455be19b0e43568db4ffd83b44c1da254ef1943cc620bb9f4f65d2" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203393 4721 generic.go:334] "Generic (PLEG): container finished" podID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerID="cb4782790aa9a69b2a20327fa4e51e5f4a5f9461eb69c419341fc62846ef8318" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203399 4721 generic.go:334] "Generic (PLEG): container finished" podID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerID="5ddea8519471235cd203737898d4a84a22947a858c4161d0cdbc538395b90d59" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203407 4721 generic.go:334] "Generic (PLEG): container finished" podID="54394d6a-eb39-44ab-b5d3-a38a393991a9" 
containerID="18523f1db324b880b4580224bf2ad72c03020e5940c91f1f4e0d34781b24bbd0" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203414 4721 generic.go:334] "Generic (PLEG): container finished" podID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerID="b17edaa3291f4fcc1070e11c3ba8b7ea2370f6138a9e013896a7afcccc514df5" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203422 4721 generic.go:334] "Generic (PLEG): container finished" podID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerID="1da308cd6e4b57955810b3f16b8850390d22406c5c67f5271af2bfcb6e30b8d5" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203473 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"d0135e82a83e7dd432d606cd974ccad81f99a7740082c819d27aad70f20df54c"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203498 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"a42959157e2fd85ce825bcf1a4e95457615a57ce555725522b47bd8f26bf7ac9"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203508 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"a325af09c6f45c8e41a6dbcb452cd9644b277e3ad497298b0240f6c9b7e522ef"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203516 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"ea3dc07b828742ed47d9ee04d67d8b69338eda5d1113da2c4460899c2c5acb37"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203525 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"69ab3857ba197ddd18cda3cf08e05c79f4925f822cd01d76d13743798385b8fb"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203533 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"97ca0d88fc5a289cee8c0ab0310c5c97915d5ac70a170da304db86445523758a"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203541 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"5a42824206ed29faa1e2a17dae669ab6e5d3adea29dd3b5a786383a72fc3adf1"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203549 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"38828a33ce455be19b0e43568db4ffd83b44c1da254ef1943cc620bb9f4f65d2"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203558 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"cb4782790aa9a69b2a20327fa4e51e5f4a5f9461eb69c419341fc62846ef8318"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203566 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" 
event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"5ddea8519471235cd203737898d4a84a22947a858c4161d0cdbc538395b90d59"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203574 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"18523f1db324b880b4580224bf2ad72c03020e5940c91f1f4e0d34781b24bbd0"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203583 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"b17edaa3291f4fcc1070e11c3ba8b7ea2370f6138a9e013896a7afcccc514df5"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.203591 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"1da308cd6e4b57955810b3f16b8850390d22406c5c67f5271af2bfcb6e30b8d5"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.208569 4721 generic.go:334] "Generic (PLEG): container finished" podID="06581551-03b2-4c6a-add5-cfa6216a461c" containerID="cdc013152076d46bfb55d74405852fea85bf4ac53ddd98ed5ce5742dbf9483b1" exitCode=0 Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.208612 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" event={"ID":"06581551-03b2-4c6a-add5-cfa6216a461c","Type":"ContainerDied","Data":"cdc013152076d46bfb55d74405852fea85bf4ac53ddd98ed5ce5742dbf9483b1"} Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.554894 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.714417 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06581551-03b2-4c6a-add5-cfa6216a461c-run-httpd\") pod \"06581551-03b2-4c6a-add5-cfa6216a461c\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.714499 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jx85d\" (UniqueName: \"kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-kube-api-access-jx85d\") pod \"06581551-03b2-4c6a-add5-cfa6216a461c\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.714554 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06581551-03b2-4c6a-add5-cfa6216a461c-log-httpd\") pod \"06581551-03b2-4c6a-add5-cfa6216a461c\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.714606 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06581551-03b2-4c6a-add5-cfa6216a461c-config-data\") pod \"06581551-03b2-4c6a-add5-cfa6216a461c\" (UID: \"06581551-03b2-4c6a-add5-cfa6216a461c\") " Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.714674 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift\") pod \"06581551-03b2-4c6a-add5-cfa6216a461c\" (UID: 
\"06581551-03b2-4c6a-add5-cfa6216a461c\") " Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.714801 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06581551-03b2-4c6a-add5-cfa6216a461c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "06581551-03b2-4c6a-add5-cfa6216a461c" (UID: "06581551-03b2-4c6a-add5-cfa6216a461c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.714938 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06581551-03b2-4c6a-add5-cfa6216a461c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "06581551-03b2-4c6a-add5-cfa6216a461c" (UID: "06581551-03b2-4c6a-add5-cfa6216a461c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.715227 4721 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06581551-03b2-4c6a-add5-cfa6216a461c-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.715281 4721 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06581551-03b2-4c6a-add5-cfa6216a461c-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.717886 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-kube-api-access-jx85d" (OuterVolumeSpecName: "kube-api-access-jx85d") pod "06581551-03b2-4c6a-add5-cfa6216a461c" (UID: "06581551-03b2-4c6a-add5-cfa6216a461c"). InnerVolumeSpecName "kube-api-access-jx85d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.718275 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "06581551-03b2-4c6a-add5-cfa6216a461c" (UID: "06581551-03b2-4c6a-add5-cfa6216a461c"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.767834 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06581551-03b2-4c6a-add5-cfa6216a461c-config-data" (OuterVolumeSpecName: "config-data") pod "06581551-03b2-4c6a-add5-cfa6216a461c" (UID: "06581551-03b2-4c6a-add5-cfa6216a461c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.817254 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jx85d\" (UniqueName: \"kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-kube-api-access-jx85d\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.817315 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06581551-03b2-4c6a-add5-cfa6216a461c-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:53 crc kubenswrapper[4721]: I1202 12:13:53.817339 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/06581551-03b2-4c6a-add5-cfa6216a461c-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.221380 4721 generic.go:334] "Generic (PLEG): container finished" podID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerID="355bdf529f7f19ac1a7928e73f70afc680d14b7084acc2ed4a8664677b5a38fc" exitCode=0 Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.221424 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"355bdf529f7f19ac1a7928e73f70afc680d14b7084acc2ed4a8664677b5a38fc"} Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.223401 4721 generic.go:334] "Generic (PLEG): container finished" podID="06581551-03b2-4c6a-add5-cfa6216a461c" containerID="dd36af16b3f8948d39118eac04affce692461b8fccf54766ba111f311f82cc07" exitCode=0 Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.223486 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.223500 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" event={"ID":"06581551-03b2-4c6a-add5-cfa6216a461c","Type":"ContainerDied","Data":"dd36af16b3f8948d39118eac04affce692461b8fccf54766ba111f311f82cc07"} Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.223525 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb" event={"ID":"06581551-03b2-4c6a-add5-cfa6216a461c","Type":"ContainerDied","Data":"e3786827f2ee63a508b3bd7a90ba0e7e00eb0046600115d57fe64e13cff991c3"} Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.223542 4721 scope.go:117] "RemoveContainer" containerID="dd36af16b3f8948d39118eac04affce692461b8fccf54766ba111f311f82cc07" Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.243675 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="bdbbf47a97448d8e3f16f5aeb238ed4d4f54e04bba167e8432c8d3dffe4602ca" exitCode=0 Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.243980 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"bdbbf47a97448d8e3f16f5aeb238ed4d4f54e04bba167e8432c8d3dffe4602ca"} Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.252477 4721 scope.go:117] "RemoveContainer" containerID="cdc013152076d46bfb55d74405852fea85bf4ac53ddd98ed5ce5742dbf9483b1" Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.258250 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerID="17ecf94ee8139633e4482645dfc512d3c308f9254511ea8aab340da5449735b3" exitCode=0 Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.258284 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerID="edea0e2ed18478f285ecaae9e57b77f1fcfb4dfd9b94f3ff54c22bc24b3b5df6" exitCode=0 Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.258305 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"17ecf94ee8139633e4482645dfc512d3c308f9254511ea8aab340da5449735b3"} Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.258329 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"edea0e2ed18478f285ecaae9e57b77f1fcfb4dfd9b94f3ff54c22bc24b3b5df6"} Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.269890 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"] Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.270314 4721 scope.go:117] "RemoveContainer" containerID="dd36af16b3f8948d39118eac04affce692461b8fccf54766ba111f311f82cc07" Dec 02 12:13:54 crc kubenswrapper[4721]: E1202 12:13:54.270834 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd36af16b3f8948d39118eac04affce692461b8fccf54766ba111f311f82cc07\": container with ID starting with dd36af16b3f8948d39118eac04affce692461b8fccf54766ba111f311f82cc07 not found: ID does not exist" 
containerID="dd36af16b3f8948d39118eac04affce692461b8fccf54766ba111f311f82cc07" Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.270873 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd36af16b3f8948d39118eac04affce692461b8fccf54766ba111f311f82cc07"} err="failed to get container status \"dd36af16b3f8948d39118eac04affce692461b8fccf54766ba111f311f82cc07\": rpc error: code = NotFound desc = could not find container \"dd36af16b3f8948d39118eac04affce692461b8fccf54766ba111f311f82cc07\": container with ID starting with dd36af16b3f8948d39118eac04affce692461b8fccf54766ba111f311f82cc07 not found: ID does not exist" Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.270899 4721 scope.go:117] "RemoveContainer" containerID="cdc013152076d46bfb55d74405852fea85bf4ac53ddd98ed5ce5742dbf9483b1" Dec 02 12:13:54 crc kubenswrapper[4721]: E1202 12:13:54.271309 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdc013152076d46bfb55d74405852fea85bf4ac53ddd98ed5ce5742dbf9483b1\": container with ID starting with cdc013152076d46bfb55d74405852fea85bf4ac53ddd98ed5ce5742dbf9483b1 not found: ID does not exist" containerID="cdc013152076d46bfb55d74405852fea85bf4ac53ddd98ed5ce5742dbf9483b1" Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.271510 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdc013152076d46bfb55d74405852fea85bf4ac53ddd98ed5ce5742dbf9483b1"} err="failed to get container status \"cdc013152076d46bfb55d74405852fea85bf4ac53ddd98ed5ce5742dbf9483b1\": rpc error: code = NotFound desc = could not find container \"cdc013152076d46bfb55d74405852fea85bf4ac53ddd98ed5ce5742dbf9483b1\": container with ID starting with cdc013152076d46bfb55d74405852fea85bf4ac53ddd98ed5ce5742dbf9483b1 not found: ID does not exist" Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.275432 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-proxy-8b5dc4bcf-zz2cb"] Dec 02 12:13:54 crc kubenswrapper[4721]: I1202 12:13:54.851502 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06581551-03b2-4c6a-add5-cfa6216a461c" path="/var/lib/kubelet/pods/06581551-03b2-4c6a-add5-cfa6216a461c/volumes" Dec 02 12:14:17 crc kubenswrapper[4721]: I1202 12:14:17.770050 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8ww9z"] Dec 02 12:14:17 crc kubenswrapper[4721]: E1202 12:14:17.770860 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06581551-03b2-4c6a-add5-cfa6216a461c" containerName="proxy-httpd" Dec 02 12:14:17 crc kubenswrapper[4721]: I1202 12:14:17.770875 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="06581551-03b2-4c6a-add5-cfa6216a461c" containerName="proxy-httpd" Dec 02 12:14:17 crc kubenswrapper[4721]: E1202 12:14:17.770888 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36d577a8-5ff0-42eb-b739-58b687bd5175" containerName="swift-ring-rebalance" Dec 02 12:14:17 crc kubenswrapper[4721]: I1202 12:14:17.770894 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="36d577a8-5ff0-42eb-b739-58b687bd5175" containerName="swift-ring-rebalance" Dec 02 12:14:17 crc kubenswrapper[4721]: E1202 12:14:17.770919 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06581551-03b2-4c6a-add5-cfa6216a461c" containerName="proxy-server" Dec 02 12:14:17 crc kubenswrapper[4721]: I1202 
12:14:17.770925 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="06581551-03b2-4c6a-add5-cfa6216a461c" containerName="proxy-server" Dec 02 12:14:17 crc kubenswrapper[4721]: I1202 12:14:17.771071 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="36d577a8-5ff0-42eb-b739-58b687bd5175" containerName="swift-ring-rebalance" Dec 02 12:14:17 crc kubenswrapper[4721]: I1202 12:14:17.771086 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="06581551-03b2-4c6a-add5-cfa6216a461c" containerName="proxy-httpd" Dec 02 12:14:17 crc kubenswrapper[4721]: I1202 12:14:17.771095 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="06581551-03b2-4c6a-add5-cfa6216a461c" containerName="proxy-server" Dec 02 12:14:17 crc kubenswrapper[4721]: I1202 12:14:17.772308 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:17 crc kubenswrapper[4721]: I1202 12:14:17.794992 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8ww9z"] Dec 02 12:14:17 crc kubenswrapper[4721]: I1202 12:14:17.960388 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f50dc76-abc7-4138-aee1-6c83241c363e-utilities\") pod \"redhat-marketplace-8ww9z\" (UID: \"1f50dc76-abc7-4138-aee1-6c83241c363e\") " pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:17 crc kubenswrapper[4721]: I1202 12:14:17.960596 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f50dc76-abc7-4138-aee1-6c83241c363e-catalog-content\") pod \"redhat-marketplace-8ww9z\" (UID: \"1f50dc76-abc7-4138-aee1-6c83241c363e\") " pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:17 crc kubenswrapper[4721]: I1202 12:14:17.960676 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrbb6\" (UniqueName: \"kubernetes.io/projected/1f50dc76-abc7-4138-aee1-6c83241c363e-kube-api-access-nrbb6\") pod \"redhat-marketplace-8ww9z\" (UID: \"1f50dc76-abc7-4138-aee1-6c83241c363e\") " pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:18 crc kubenswrapper[4721]: I1202 12:14:18.062141 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f50dc76-abc7-4138-aee1-6c83241c363e-catalog-content\") pod \"redhat-marketplace-8ww9z\" (UID: \"1f50dc76-abc7-4138-aee1-6c83241c363e\") " pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:18 crc kubenswrapper[4721]: I1202 12:14:18.062253 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrbb6\" (UniqueName: \"kubernetes.io/projected/1f50dc76-abc7-4138-aee1-6c83241c363e-kube-api-access-nrbb6\") pod \"redhat-marketplace-8ww9z\" (UID: \"1f50dc76-abc7-4138-aee1-6c83241c363e\") " pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:18 crc kubenswrapper[4721]: I1202 12:14:18.062324 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f50dc76-abc7-4138-aee1-6c83241c363e-utilities\") pod \"redhat-marketplace-8ww9z\" (UID: \"1f50dc76-abc7-4138-aee1-6c83241c363e\") " pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 
12:14:18 crc kubenswrapper[4721]: I1202 12:14:18.062867 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f50dc76-abc7-4138-aee1-6c83241c363e-catalog-content\") pod \"redhat-marketplace-8ww9z\" (UID: \"1f50dc76-abc7-4138-aee1-6c83241c363e\") " pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:18 crc kubenswrapper[4721]: I1202 12:14:18.062902 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f50dc76-abc7-4138-aee1-6c83241c363e-utilities\") pod \"redhat-marketplace-8ww9z\" (UID: \"1f50dc76-abc7-4138-aee1-6c83241c363e\") " pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:18 crc kubenswrapper[4721]: I1202 12:14:18.082119 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrbb6\" (UniqueName: \"kubernetes.io/projected/1f50dc76-abc7-4138-aee1-6c83241c363e-kube-api-access-nrbb6\") pod \"redhat-marketplace-8ww9z\" (UID: \"1f50dc76-abc7-4138-aee1-6c83241c363e\") " pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:18 crc kubenswrapper[4721]: I1202 12:14:18.091468 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:18 crc kubenswrapper[4721]: I1202 12:14:18.506162 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8ww9z"] Dec 02 12:14:19 crc kubenswrapper[4721]: I1202 12:14:19.458477 4721 generic.go:334] "Generic (PLEG): container finished" podID="1f50dc76-abc7-4138-aee1-6c83241c363e" containerID="38e420cc00247380caac1a1f902b62e93571d0e18db3d38c756e158c4a49fa21" exitCode=0 Dec 02 12:14:19 crc kubenswrapper[4721]: I1202 12:14:19.458545 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8ww9z" event={"ID":"1f50dc76-abc7-4138-aee1-6c83241c363e","Type":"ContainerDied","Data":"38e420cc00247380caac1a1f902b62e93571d0e18db3d38c756e158c4a49fa21"} Dec 02 12:14:19 crc kubenswrapper[4721]: I1202 12:14:19.458857 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8ww9z" event={"ID":"1f50dc76-abc7-4138-aee1-6c83241c363e","Type":"ContainerStarted","Data":"5c357c64ef0e7d322f4800471cec72eef9b3995b387d7a90f05cb8ad069f0ff4"} Dec 02 12:14:19 crc kubenswrapper[4721]: I1202 12:14:19.460876 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 12:14:20 crc kubenswrapper[4721]: I1202 12:14:20.469280 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8ww9z" event={"ID":"1f50dc76-abc7-4138-aee1-6c83241c363e","Type":"ContainerStarted","Data":"0549eb0b59d69bad607b62e24c7aba9b8ca35b8d21376a20a3f1265f0424c762"} Dec 02 12:14:21 crc kubenswrapper[4721]: I1202 12:14:21.479387 4721 generic.go:334] "Generic (PLEG): container finished" podID="1f50dc76-abc7-4138-aee1-6c83241c363e" containerID="0549eb0b59d69bad607b62e24c7aba9b8ca35b8d21376a20a3f1265f0424c762" exitCode=0 Dec 02 12:14:21 crc kubenswrapper[4721]: I1202 12:14:21.479793 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8ww9z" event={"ID":"1f50dc76-abc7-4138-aee1-6c83241c363e","Type":"ContainerDied","Data":"0549eb0b59d69bad607b62e24c7aba9b8ca35b8d21376a20a3f1265f0424c762"} Dec 02 12:14:22 crc kubenswrapper[4721]: I1202 12:14:22.352094 4721 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-22p4n"] Dec 02 12:14:22 crc kubenswrapper[4721]: I1202 12:14:22.354054 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-22p4n" Dec 02 12:14:22 crc kubenswrapper[4721]: I1202 12:14:22.371821 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-22p4n"] Dec 02 12:14:22 crc kubenswrapper[4721]: I1202 12:14:22.488821 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8ww9z" event={"ID":"1f50dc76-abc7-4138-aee1-6c83241c363e","Type":"ContainerStarted","Data":"7038db36a71f3b31bdfad66d49d35554533eb65d1aa76f52bf5d9e884e188f71"} Dec 02 12:14:22 crc kubenswrapper[4721]: I1202 12:14:22.508828 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8ww9z" podStartSLOduration=2.91441438 podStartE2EDuration="5.508808386s" podCreationTimestamp="2025-12-02 12:14:17 +0000 UTC" firstStartedPulling="2025-12-02 12:14:19.460627708 +0000 UTC m=+1389.199178067" lastFinishedPulling="2025-12-02 12:14:22.055021724 +0000 UTC m=+1391.793572073" observedRunningTime="2025-12-02 12:14:22.506152034 +0000 UTC m=+1392.244702393" watchObservedRunningTime="2025-12-02 12:14:22.508808386 +0000 UTC m=+1392.247358725" Dec 02 12:14:22 crc kubenswrapper[4721]: I1202 12:14:22.531532 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-utilities\") pod \"redhat-operators-22p4n\" (UID: \"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448\") " pod="openshift-marketplace/redhat-operators-22p4n" Dec 02 12:14:22 crc kubenswrapper[4721]: I1202 12:14:22.531587 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcdfm\" (UniqueName: \"kubernetes.io/projected/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-kube-api-access-gcdfm\") pod \"redhat-operators-22p4n\" (UID: \"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448\") " pod="openshift-marketplace/redhat-operators-22p4n" Dec 02 12:14:22 crc kubenswrapper[4721]: I1202 12:14:22.531613 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-catalog-content\") pod \"redhat-operators-22p4n\" (UID: \"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448\") " pod="openshift-marketplace/redhat-operators-22p4n" Dec 02 12:14:22 crc kubenswrapper[4721]: I1202 12:14:22.633232 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-utilities\") pod \"redhat-operators-22p4n\" (UID: \"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448\") " pod="openshift-marketplace/redhat-operators-22p4n" Dec 02 12:14:22 crc kubenswrapper[4721]: I1202 12:14:22.633282 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcdfm\" (UniqueName: \"kubernetes.io/projected/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-kube-api-access-gcdfm\") pod \"redhat-operators-22p4n\" (UID: \"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448\") " pod="openshift-marketplace/redhat-operators-22p4n" Dec 02 12:14:22 crc kubenswrapper[4721]: I1202 12:14:22.633305 4721 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-catalog-content\") pod \"redhat-operators-22p4n\" (UID: \"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448\") " pod="openshift-marketplace/redhat-operators-22p4n" Dec 02 12:14:22 crc kubenswrapper[4721]: I1202 12:14:22.633712 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-catalog-content\") pod \"redhat-operators-22p4n\" (UID: \"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448\") " pod="openshift-marketplace/redhat-operators-22p4n" Dec 02 12:14:22 crc kubenswrapper[4721]: I1202 12:14:22.634179 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-utilities\") pod \"redhat-operators-22p4n\" (UID: \"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448\") " pod="openshift-marketplace/redhat-operators-22p4n" Dec 02 12:14:22 crc kubenswrapper[4721]: I1202 12:14:22.659929 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcdfm\" (UniqueName: \"kubernetes.io/projected/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-kube-api-access-gcdfm\") pod \"redhat-operators-22p4n\" (UID: \"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448\") " pod="openshift-marketplace/redhat-operators-22p4n" Dec 02 12:14:22 crc kubenswrapper[4721]: I1202 12:14:22.669544 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-22p4n" Dec 02 12:14:22 crc kubenswrapper[4721]: E1202 12:14:22.984125 4721 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod54394d6a_eb39_44ab_b5d3_a38a393991a9.slice/crio-conmon-fa37ab877067b4bd0837ef51174cd21450a84a3f3c933b37297b14c21c679786.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bf92b26_9c78_4bde_8b52_56263859b9fb.slice/crio-9f93046a00ab18f63db21c1a5dba48ed7daff1a6b7044e4d1da47b18536102d3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod54394d6a_eb39_44ab_b5d3_a38a393991a9.slice/crio-fa37ab877067b4bd0837ef51174cd21450a84a3f3c933b37297b14c21c679786.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bf92b26_9c78_4bde_8b52_56263859b9fb.slice/crio-conmon-9f93046a00ab18f63db21c1a5dba48ed7daff1a6b7044e4d1da47b18536102d3.scope\": RecentStats: unable to find data in memory cache]" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.098056 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-22p4n"] Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.132250 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.233131 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.239380 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.242210 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"54394d6a-eb39-44ab-b5d3-a38a393991a9\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.242272 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6hwq\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-kube-api-access-t6hwq\") pod \"54394d6a-eb39-44ab-b5d3-a38a393991a9\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.242422 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/54394d6a-eb39-44ab-b5d3-a38a393991a9-lock\") pod \"54394d6a-eb39-44ab-b5d3-a38a393991a9\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.242493 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift\") pod \"54394d6a-eb39-44ab-b5d3-a38a393991a9\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.242540 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/54394d6a-eb39-44ab-b5d3-a38a393991a9-cache\") pod \"54394d6a-eb39-44ab-b5d3-a38a393991a9\" (UID: \"54394d6a-eb39-44ab-b5d3-a38a393991a9\") " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.243152 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54394d6a-eb39-44ab-b5d3-a38a393991a9-cache" (OuterVolumeSpecName: "cache") pod "54394d6a-eb39-44ab-b5d3-a38a393991a9" (UID: "54394d6a-eb39-44ab-b5d3-a38a393991a9"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.243386 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54394d6a-eb39-44ab-b5d3-a38a393991a9-lock" (OuterVolumeSpecName: "lock") pod "54394d6a-eb39-44ab-b5d3-a38a393991a9" (UID: "54394d6a-eb39-44ab-b5d3-a38a393991a9"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.247910 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "swift") pod "54394d6a-eb39-44ab-b5d3-a38a393991a9" (UID: "54394d6a-eb39-44ab-b5d3-a38a393991a9"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.248700 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-kube-api-access-t6hwq" (OuterVolumeSpecName: "kube-api-access-t6hwq") pod "54394d6a-eb39-44ab-b5d3-a38a393991a9" (UID: "54394d6a-eb39-44ab-b5d3-a38a393991a9"). InnerVolumeSpecName "kube-api-access-t6hwq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.252704 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "54394d6a-eb39-44ab-b5d3-a38a393991a9" (UID: "54394d6a-eb39-44ab-b5d3-a38a393991a9"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.344506 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b8f4669a-6b01-451f-91cc-2786303375db-cache\") pod \"b8f4669a-6b01-451f-91cc-2786303375db\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.344553 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b8f4669a-6b01-451f-91cc-2786303375db-lock\") pod \"b8f4669a-6b01-451f-91cc-2786303375db\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.344571 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift\") pod \"b8f4669a-6b01-451f-91cc-2786303375db\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.344588 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5ngf\" (UniqueName: \"kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-kube-api-access-s5ngf\") pod \"b8f4669a-6b01-451f-91cc-2786303375db\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.344609 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"b8f4669a-6b01-451f-91cc-2786303375db\" (UID: \"b8f4669a-6b01-451f-91cc-2786303375db\") " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.344630 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-znzb5\" (UniqueName: \"kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-kube-api-access-znzb5\") pod \"8bf92b26-9c78-4bde-8b52-56263859b9fb\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.344660 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"8bf92b26-9c78-4bde-8b52-56263859b9fb\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.344688 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8bf92b26-9c78-4bde-8b52-56263859b9fb-cache\") pod \"8bf92b26-9c78-4bde-8b52-56263859b9fb\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.344723 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift\") pod \"8bf92b26-9c78-4bde-8b52-56263859b9fb\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " Dec 02 
12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.344743 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8bf92b26-9c78-4bde-8b52-56263859b9fb-lock\") pod \"8bf92b26-9c78-4bde-8b52-56263859b9fb\" (UID: \"8bf92b26-9c78-4bde-8b52-56263859b9fb\") " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.344908 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.344918 4721 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/54394d6a-eb39-44ab-b5d3-a38a393991a9-cache\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.344935 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.344944 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6hwq\" (UniqueName: \"kubernetes.io/projected/54394d6a-eb39-44ab-b5d3-a38a393991a9-kube-api-access-t6hwq\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.344953 4721 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/54394d6a-eb39-44ab-b5d3-a38a393991a9-lock\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.345018 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8f4669a-6b01-451f-91cc-2786303375db-lock" (OuterVolumeSpecName: "lock") pod "b8f4669a-6b01-451f-91cc-2786303375db" (UID: "b8f4669a-6b01-451f-91cc-2786303375db"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.345107 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8f4669a-6b01-451f-91cc-2786303375db-cache" (OuterVolumeSpecName: "cache") pod "b8f4669a-6b01-451f-91cc-2786303375db" (UID: "b8f4669a-6b01-451f-91cc-2786303375db"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.348293 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "b8f4669a-6b01-451f-91cc-2786303375db" (UID: "b8f4669a-6b01-451f-91cc-2786303375db"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.348709 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bf92b26-9c78-4bde-8b52-56263859b9fb-cache" (OuterVolumeSpecName: "cache") pod "8bf92b26-9c78-4bde-8b52-56263859b9fb" (UID: "8bf92b26-9c78-4bde-8b52-56263859b9fb"). InnerVolumeSpecName "cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.349808 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "swift") pod "b8f4669a-6b01-451f-91cc-2786303375db" (UID: "b8f4669a-6b01-451f-91cc-2786303375db"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.349831 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "swift") pod "8bf92b26-9c78-4bde-8b52-56263859b9fb" (UID: "8bf92b26-9c78-4bde-8b52-56263859b9fb"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.350065 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "8bf92b26-9c78-4bde-8b52-56263859b9fb" (UID: "8bf92b26-9c78-4bde-8b52-56263859b9fb"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.350328 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-kube-api-access-znzb5" (OuterVolumeSpecName: "kube-api-access-znzb5") pod "8bf92b26-9c78-4bde-8b52-56263859b9fb" (UID: "8bf92b26-9c78-4bde-8b52-56263859b9fb"). InnerVolumeSpecName "kube-api-access-znzb5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.350756 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bf92b26-9c78-4bde-8b52-56263859b9fb-lock" (OuterVolumeSpecName: "lock") pod "8bf92b26-9c78-4bde-8b52-56263859b9fb" (UID: "8bf92b26-9c78-4bde-8b52-56263859b9fb"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.352494 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-kube-api-access-s5ngf" (OuterVolumeSpecName: "kube-api-access-s5ngf") pod "b8f4669a-6b01-451f-91cc-2786303375db" (UID: "b8f4669a-6b01-451f-91cc-2786303375db"). InnerVolumeSpecName "kube-api-access-s5ngf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.365283 4721 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.445909 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.445944 4721 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8bf92b26-9c78-4bde-8b52-56263859b9fb-lock\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.445964 4721 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b8f4669a-6b01-451f-91cc-2786303375db-cache\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.445977 4721 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b8f4669a-6b01-451f-91cc-2786303375db-lock\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.445988 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.445998 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5ngf\" (UniqueName: \"kubernetes.io/projected/b8f4669a-6b01-451f-91cc-2786303375db-kube-api-access-s5ngf\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.446055 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.446067 4721 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.446078 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-znzb5\" (UniqueName: \"kubernetes.io/projected/8bf92b26-9c78-4bde-8b52-56263859b9fb-kube-api-access-znzb5\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.446094 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.446103 4721 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8bf92b26-9c78-4bde-8b52-56263859b9fb-cache\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.457547 4721 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.463601 4721 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: 
"kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.503697 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerID="9f93046a00ab18f63db21c1a5dba48ed7daff1a6b7044e4d1da47b18536102d3" exitCode=137 Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.503788 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"9f93046a00ab18f63db21c1a5dba48ed7daff1a6b7044e4d1da47b18536102d3"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.503798 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-2" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.503847 4721 scope.go:117] "RemoveContainer" containerID="9f93046a00ab18f63db21c1a5dba48ed7daff1a6b7044e4d1da47b18536102d3" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.503835 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-2" event={"ID":"8bf92b26-9c78-4bde-8b52-56263859b9fb","Type":"ContainerDied","Data":"a545696ddb58c034f24b57f6b2da97462f5ca97440bcc7274ab84a166bbdc8f6"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.520143 4721 generic.go:334] "Generic (PLEG): container finished" podID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerID="fa37ab877067b4bd0837ef51174cd21450a84a3f3c933b37297b14c21c679786" exitCode=137 Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.520220 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"fa37ab877067b4bd0837ef51174cd21450a84a3f3c933b37297b14c21c679786"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.520268 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"54394d6a-eb39-44ab-b5d3-a38a393991a9","Type":"ContainerDied","Data":"2333852cfcd2e5bad8bf5c55e7b61f14bdc588989ba9ac8d53cc65ea05088dbe"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.520282 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"97ca0d88fc5a289cee8c0ab0310c5c97915d5ac70a170da304db86445523758a"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.520295 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5a42824206ed29faa1e2a17dae669ab6e5d3adea29dd3b5a786383a72fc3adf1"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.520301 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38828a33ce455be19b0e43568db4ffd83b44c1da254ef1943cc620bb9f4f65d2"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.520307 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb4782790aa9a69b2a20327fa4e51e5f4a5f9461eb69c419341fc62846ef8318"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.520312 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"355bdf529f7f19ac1a7928e73f70afc680d14b7084acc2ed4a8664677b5a38fc"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.520317 4721 pod_container_deletor.go:114] "Failed to issue the request to remove 
container" containerID={"Type":"cri-o","ID":"5ddea8519471235cd203737898d4a84a22947a858c4161d0cdbc538395b90d59"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.520322 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"18523f1db324b880b4580224bf2ad72c03020e5940c91f1f4e0d34781b24bbd0"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.520327 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b17edaa3291f4fcc1070e11c3ba8b7ea2370f6138a9e013896a7afcccc514df5"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.520332 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1da308cd6e4b57955810b3f16b8850390d22406c5c67f5271af2bfcb6e30b8d5"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.520558 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.522331 4721 generic.go:334] "Generic (PLEG): container finished" podID="bc7a8167-f9eb-4d3b-8e66-c585f6e2e448" containerID="b796c52d41c18abf568ae62cd5db5866715992bab942b7be52a8a7fd8e5df8f1" exitCode=0 Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.522693 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-22p4n" event={"ID":"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448","Type":"ContainerDied","Data":"b796c52d41c18abf568ae62cd5db5866715992bab942b7be52a8a7fd8e5df8f1"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.522738 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-22p4n" event={"ID":"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448","Type":"ContainerStarted","Data":"98d6a7173d49f593286c7d3b2823058caabae4f428241a0d96e83469f69fa6bc"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.526152 4721 scope.go:117] "RemoveContainer" containerID="17ecf94ee8139633e4482645dfc512d3c308f9254511ea8aab340da5449735b3" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539037 4721 generic.go:334] "Generic (PLEG): container finished" podID="b8f4669a-6b01-451f-91cc-2786303375db" containerID="82622747e6978b5288a6f64a0f89c622a9c67f282e2129f1c52ace5096950be4" exitCode=137 Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539141 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-1" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539128 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"82622747e6978b5288a6f64a0f89c622a9c67f282e2129f1c52ace5096950be4"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539202 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1c7c9162b575caa76db5e414cb7b4015f067355fff84caa7f8ac197367c94e01"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539215 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a09e88795ce2afc38328685e05ceb0f9eb725d61c3b98331e5658b0322ce0e3a"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539222 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ca8d8914238f7696f9bae1ac633efc7bd5b63e29dac383a6626db4cf4f83dcb2"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539227 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"479d0dec2aaa6fd4ccce84dbbf0d0e58053b85167261d3f59757a121591636bc"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539233 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"39c5390ffcbe3c0be784db7ef05672025fcf3d952d626304e297e1590718a897"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539238 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"37cc4ccb7d8d4a2ff89c5372f3428d8517c9cf30a414c165ecbb4c6f1b5fa94c"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539243 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8363b9e2ac15e6050113b7320c011910acd54ded41353912314d366cc137db51"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539255 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ddcdd1ec5d88f5c19c5c33956dd876023076ce8196b01b8b60b11011d978cc74"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539260 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e9c97886d2281d2db8385c13740c5db13108dbad3318be268566bdf4ef38e60"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539266 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"056786e0cc65aa5944f5f7bf4545d4c3cd9ae097250472dddf1c826cb1ac9231"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539271 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3012afa35630159ed0600ebceef3df1eaf4dfb0728ea48837272b66f5dce0c7b"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539278 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff082e662a26aeeea6732d9ad265b9acb9da661a4252a9d3dd52302629db893e"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539284 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"3c4eacf0568ae73dd6413ce1561622aa8998c530cf8df7cce6ebe9c2c46d80b0"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539289 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bdbbf47a97448d8e3f16f5aeb238ed4d4f54e04bba167e8432c8d3dffe4602ca"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539304 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-1" event={"ID":"b8f4669a-6b01-451f-91cc-2786303375db","Type":"ContainerDied","Data":"32f4271625b8b239a24b916d8594bf7119bd560f8fd7fc53af93e927e228f72e"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539315 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"82622747e6978b5288a6f64a0f89c622a9c67f282e2129f1c52ace5096950be4"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539323 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1c7c9162b575caa76db5e414cb7b4015f067355fff84caa7f8ac197367c94e01"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539328 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a09e88795ce2afc38328685e05ceb0f9eb725d61c3b98331e5658b0322ce0e3a"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539333 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ca8d8914238f7696f9bae1ac633efc7bd5b63e29dac383a6626db4cf4f83dcb2"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539353 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"479d0dec2aaa6fd4ccce84dbbf0d0e58053b85167261d3f59757a121591636bc"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539359 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"39c5390ffcbe3c0be784db7ef05672025fcf3d952d626304e297e1590718a897"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539364 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"37cc4ccb7d8d4a2ff89c5372f3428d8517c9cf30a414c165ecbb4c6f1b5fa94c"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539369 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8363b9e2ac15e6050113b7320c011910acd54ded41353912314d366cc137db51"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539374 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ddcdd1ec5d88f5c19c5c33956dd876023076ce8196b01b8b60b11011d978cc74"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539378 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6e9c97886d2281d2db8385c13740c5db13108dbad3318be268566bdf4ef38e60"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539383 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"056786e0cc65aa5944f5f7bf4545d4c3cd9ae097250472dddf1c826cb1ac9231"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539388 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"3012afa35630159ed0600ebceef3df1eaf4dfb0728ea48837272b66f5dce0c7b"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539393 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff082e662a26aeeea6732d9ad265b9acb9da661a4252a9d3dd52302629db893e"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539398 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3c4eacf0568ae73dd6413ce1561622aa8998c530cf8df7cce6ebe9c2c46d80b0"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.539403 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bdbbf47a97448d8e3f16f5aeb238ed4d4f54e04bba167e8432c8d3dffe4602ca"} Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.544926 4721 scope.go:117] "RemoveContainer" containerID="27cc7a59bb52c7bd6d9de63a62663eb93857c7142b7a434c5f3e8c7bb4b8aa05" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.547285 4721 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.547309 4721 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.571606 4721 scope.go:117] "RemoveContainer" containerID="b13272c7bfd0fb6396c6e0277ad0fe07e2d432578e94c8ecd666fcb45ea827e9" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.583081 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.593261 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-2"] Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.604888 4721 scope.go:117] "RemoveContainer" containerID="fa20ac24780157fa9b52a532ecfc6a01b8357c8e4777d512d203c65148a94f96" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.610584 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.637837 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.646516 4721 scope.go:117] "RemoveContainer" containerID="66c39b5fbb7a7d9d64886db6217c70fa81f42865cdaa296dc478fcca96a41978" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.647124 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.651387 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-1"] Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.671510 4721 scope.go:117] "RemoveContainer" containerID="edea0e2ed18478f285ecaae9e57b77f1fcfb4dfd9b94f3ff54c22bc24b3b5df6" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.687624 4721 scope.go:117] "RemoveContainer" containerID="19bd7e7242eb220985a6143c5b395a3162c141462dd4414ff8c5f615f49002b1" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.702760 4721 scope.go:117] "RemoveContainer" containerID="80210a92576ea67e922d78ea7ac8c50c584e2c56cd8467eb85b5b29838f31334" Dec 02 
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.719734 4721 scope.go:117] "RemoveContainer" containerID="a868d42e6fa1c098fc9d1583f04dca38e0be60a59a7c0762d581b1ca1d12bbda"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.741706 4721 scope.go:117] "RemoveContainer" containerID="3c6328f8519642aa83a494e07195a10428397045d3b8562babcc3bd7fa755d16"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.759806 4721 scope.go:117] "RemoveContainer" containerID="afc24a08241150352ee3448002c09defeace534d3cdb24c6c11e113df2658865"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.778274 4721 scope.go:117] "RemoveContainer" containerID="c65cb5a856a1c5496889424f188ddde01dad3f8c58c03eaf04981bec7fcfa362"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.797016 4721 scope.go:117] "RemoveContainer" containerID="f33070308e90b30d9cdd71f45cf5283d4f08d7eb1d267b4d5c3f4fdb3b88e7c9"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.811611 4721 scope.go:117] "RemoveContainer" containerID="9a14a12fa0925fa7c29432aa1ea91ad4238ba9bbcd8d6c303a4a6e23d98d992c"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.828729 4721 scope.go:117] "RemoveContainer" containerID="9f93046a00ab18f63db21c1a5dba48ed7daff1a6b7044e4d1da47b18536102d3"
Dec 02 12:14:23 crc kubenswrapper[4721]: E1202 12:14:23.829219 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f93046a00ab18f63db21c1a5dba48ed7daff1a6b7044e4d1da47b18536102d3\": container with ID starting with 9f93046a00ab18f63db21c1a5dba48ed7daff1a6b7044e4d1da47b18536102d3 not found: ID does not exist" containerID="9f93046a00ab18f63db21c1a5dba48ed7daff1a6b7044e4d1da47b18536102d3"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.829251 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f93046a00ab18f63db21c1a5dba48ed7daff1a6b7044e4d1da47b18536102d3"} err="failed to get container status \"9f93046a00ab18f63db21c1a5dba48ed7daff1a6b7044e4d1da47b18536102d3\": rpc error: code = NotFound desc = could not find container \"9f93046a00ab18f63db21c1a5dba48ed7daff1a6b7044e4d1da47b18536102d3\": container with ID starting with 9f93046a00ab18f63db21c1a5dba48ed7daff1a6b7044e4d1da47b18536102d3 not found: ID does not exist"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.829273 4721 scope.go:117] "RemoveContainer" containerID="17ecf94ee8139633e4482645dfc512d3c308f9254511ea8aab340da5449735b3"
Dec 02 12:14:23 crc kubenswrapper[4721]: E1202 12:14:23.829682 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17ecf94ee8139633e4482645dfc512d3c308f9254511ea8aab340da5449735b3\": container with ID starting with 17ecf94ee8139633e4482645dfc512d3c308f9254511ea8aab340da5449735b3 not found: ID does not exist" containerID="17ecf94ee8139633e4482645dfc512d3c308f9254511ea8aab340da5449735b3"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.829720 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17ecf94ee8139633e4482645dfc512d3c308f9254511ea8aab340da5449735b3"} err="failed to get container status \"17ecf94ee8139633e4482645dfc512d3c308f9254511ea8aab340da5449735b3\": rpc error: code = NotFound desc = could not find container \"17ecf94ee8139633e4482645dfc512d3c308f9254511ea8aab340da5449735b3\": container with ID starting with 17ecf94ee8139633e4482645dfc512d3c308f9254511ea8aab340da5449735b3 not found: ID does not exist"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.829740 4721 scope.go:117] "RemoveContainer" containerID="27cc7a59bb52c7bd6d9de63a62663eb93857c7142b7a434c5f3e8c7bb4b8aa05"
Dec 02 12:14:23 crc kubenswrapper[4721]: E1202 12:14:23.833239 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27cc7a59bb52c7bd6d9de63a62663eb93857c7142b7a434c5f3e8c7bb4b8aa05\": container with ID starting with 27cc7a59bb52c7bd6d9de63a62663eb93857c7142b7a434c5f3e8c7bb4b8aa05 not found: ID does not exist" containerID="27cc7a59bb52c7bd6d9de63a62663eb93857c7142b7a434c5f3e8c7bb4b8aa05"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.833290 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27cc7a59bb52c7bd6d9de63a62663eb93857c7142b7a434c5f3e8c7bb4b8aa05"} err="failed to get container status \"27cc7a59bb52c7bd6d9de63a62663eb93857c7142b7a434c5f3e8c7bb4b8aa05\": rpc error: code = NotFound desc = could not find container \"27cc7a59bb52c7bd6d9de63a62663eb93857c7142b7a434c5f3e8c7bb4b8aa05\": container with ID starting with 27cc7a59bb52c7bd6d9de63a62663eb93857c7142b7a434c5f3e8c7bb4b8aa05 not found: ID does not exist"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.833317 4721 scope.go:117] "RemoveContainer" containerID="b13272c7bfd0fb6396c6e0277ad0fe07e2d432578e94c8ecd666fcb45ea827e9"
Dec 02 12:14:23 crc kubenswrapper[4721]: E1202 12:14:23.833695 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b13272c7bfd0fb6396c6e0277ad0fe07e2d432578e94c8ecd666fcb45ea827e9\": container with ID starting with b13272c7bfd0fb6396c6e0277ad0fe07e2d432578e94c8ecd666fcb45ea827e9 not found: ID does not exist" containerID="b13272c7bfd0fb6396c6e0277ad0fe07e2d432578e94c8ecd666fcb45ea827e9"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.833716 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b13272c7bfd0fb6396c6e0277ad0fe07e2d432578e94c8ecd666fcb45ea827e9"} err="failed to get container status \"b13272c7bfd0fb6396c6e0277ad0fe07e2d432578e94c8ecd666fcb45ea827e9\": rpc error: code = NotFound desc = could not find container \"b13272c7bfd0fb6396c6e0277ad0fe07e2d432578e94c8ecd666fcb45ea827e9\": container with ID starting with b13272c7bfd0fb6396c6e0277ad0fe07e2d432578e94c8ecd666fcb45ea827e9 not found: ID does not exist"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.833729 4721 scope.go:117] "RemoveContainer" containerID="fa20ac24780157fa9b52a532ecfc6a01b8357c8e4777d512d203c65148a94f96"
Dec 02 12:14:23 crc kubenswrapper[4721]: E1202 12:14:23.834017 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa20ac24780157fa9b52a532ecfc6a01b8357c8e4777d512d203c65148a94f96\": container with ID starting with fa20ac24780157fa9b52a532ecfc6a01b8357c8e4777d512d203c65148a94f96 not found: ID does not exist" containerID="fa20ac24780157fa9b52a532ecfc6a01b8357c8e4777d512d203c65148a94f96"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.834057 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa20ac24780157fa9b52a532ecfc6a01b8357c8e4777d512d203c65148a94f96"} err="failed to get container status \"fa20ac24780157fa9b52a532ecfc6a01b8357c8e4777d512d203c65148a94f96\": rpc error: code = NotFound desc = could not find container \"fa20ac24780157fa9b52a532ecfc6a01b8357c8e4777d512d203c65148a94f96\": container with ID starting with fa20ac24780157fa9b52a532ecfc6a01b8357c8e4777d512d203c65148a94f96 not found: ID does not exist"
\"fa20ac24780157fa9b52a532ecfc6a01b8357c8e4777d512d203c65148a94f96\": container with ID starting with fa20ac24780157fa9b52a532ecfc6a01b8357c8e4777d512d203c65148a94f96 not found: ID does not exist" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.834079 4721 scope.go:117] "RemoveContainer" containerID="66c39b5fbb7a7d9d64886db6217c70fa81f42865cdaa296dc478fcca96a41978" Dec 02 12:14:23 crc kubenswrapper[4721]: E1202 12:14:23.834435 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66c39b5fbb7a7d9d64886db6217c70fa81f42865cdaa296dc478fcca96a41978\": container with ID starting with 66c39b5fbb7a7d9d64886db6217c70fa81f42865cdaa296dc478fcca96a41978 not found: ID does not exist" containerID="66c39b5fbb7a7d9d64886db6217c70fa81f42865cdaa296dc478fcca96a41978" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.834457 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66c39b5fbb7a7d9d64886db6217c70fa81f42865cdaa296dc478fcca96a41978"} err="failed to get container status \"66c39b5fbb7a7d9d64886db6217c70fa81f42865cdaa296dc478fcca96a41978\": rpc error: code = NotFound desc = could not find container \"66c39b5fbb7a7d9d64886db6217c70fa81f42865cdaa296dc478fcca96a41978\": container with ID starting with 66c39b5fbb7a7d9d64886db6217c70fa81f42865cdaa296dc478fcca96a41978 not found: ID does not exist" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.834471 4721 scope.go:117] "RemoveContainer" containerID="edea0e2ed18478f285ecaae9e57b77f1fcfb4dfd9b94f3ff54c22bc24b3b5df6" Dec 02 12:14:23 crc kubenswrapper[4721]: E1202 12:14:23.834659 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edea0e2ed18478f285ecaae9e57b77f1fcfb4dfd9b94f3ff54c22bc24b3b5df6\": container with ID starting with edea0e2ed18478f285ecaae9e57b77f1fcfb4dfd9b94f3ff54c22bc24b3b5df6 not found: ID does not exist" containerID="edea0e2ed18478f285ecaae9e57b77f1fcfb4dfd9b94f3ff54c22bc24b3b5df6" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.834678 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edea0e2ed18478f285ecaae9e57b77f1fcfb4dfd9b94f3ff54c22bc24b3b5df6"} err="failed to get container status \"edea0e2ed18478f285ecaae9e57b77f1fcfb4dfd9b94f3ff54c22bc24b3b5df6\": rpc error: code = NotFound desc = could not find container \"edea0e2ed18478f285ecaae9e57b77f1fcfb4dfd9b94f3ff54c22bc24b3b5df6\": container with ID starting with edea0e2ed18478f285ecaae9e57b77f1fcfb4dfd9b94f3ff54c22bc24b3b5df6 not found: ID does not exist" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.834691 4721 scope.go:117] "RemoveContainer" containerID="19bd7e7242eb220985a6143c5b395a3162c141462dd4414ff8c5f615f49002b1" Dec 02 12:14:23 crc kubenswrapper[4721]: E1202 12:14:23.834983 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19bd7e7242eb220985a6143c5b395a3162c141462dd4414ff8c5f615f49002b1\": container with ID starting with 19bd7e7242eb220985a6143c5b395a3162c141462dd4414ff8c5f615f49002b1 not found: ID does not exist" containerID="19bd7e7242eb220985a6143c5b395a3162c141462dd4414ff8c5f615f49002b1" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.835002 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19bd7e7242eb220985a6143c5b395a3162c141462dd4414ff8c5f615f49002b1"} 
err="failed to get container status \"19bd7e7242eb220985a6143c5b395a3162c141462dd4414ff8c5f615f49002b1\": rpc error: code = NotFound desc = could not find container \"19bd7e7242eb220985a6143c5b395a3162c141462dd4414ff8c5f615f49002b1\": container with ID starting with 19bd7e7242eb220985a6143c5b395a3162c141462dd4414ff8c5f615f49002b1 not found: ID does not exist" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.835014 4721 scope.go:117] "RemoveContainer" containerID="80210a92576ea67e922d78ea7ac8c50c584e2c56cd8467eb85b5b29838f31334" Dec 02 12:14:23 crc kubenswrapper[4721]: E1202 12:14:23.835241 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80210a92576ea67e922d78ea7ac8c50c584e2c56cd8467eb85b5b29838f31334\": container with ID starting with 80210a92576ea67e922d78ea7ac8c50c584e2c56cd8467eb85b5b29838f31334 not found: ID does not exist" containerID="80210a92576ea67e922d78ea7ac8c50c584e2c56cd8467eb85b5b29838f31334" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.835268 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80210a92576ea67e922d78ea7ac8c50c584e2c56cd8467eb85b5b29838f31334"} err="failed to get container status \"80210a92576ea67e922d78ea7ac8c50c584e2c56cd8467eb85b5b29838f31334\": rpc error: code = NotFound desc = could not find container \"80210a92576ea67e922d78ea7ac8c50c584e2c56cd8467eb85b5b29838f31334\": container with ID starting with 80210a92576ea67e922d78ea7ac8c50c584e2c56cd8467eb85b5b29838f31334 not found: ID does not exist" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.835284 4721 scope.go:117] "RemoveContainer" containerID="a868d42e6fa1c098fc9d1583f04dca38e0be60a59a7c0762d581b1ca1d12bbda" Dec 02 12:14:23 crc kubenswrapper[4721]: E1202 12:14:23.835557 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a868d42e6fa1c098fc9d1583f04dca38e0be60a59a7c0762d581b1ca1d12bbda\": container with ID starting with a868d42e6fa1c098fc9d1583f04dca38e0be60a59a7c0762d581b1ca1d12bbda not found: ID does not exist" containerID="a868d42e6fa1c098fc9d1583f04dca38e0be60a59a7c0762d581b1ca1d12bbda" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.835584 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a868d42e6fa1c098fc9d1583f04dca38e0be60a59a7c0762d581b1ca1d12bbda"} err="failed to get container status \"a868d42e6fa1c098fc9d1583f04dca38e0be60a59a7c0762d581b1ca1d12bbda\": rpc error: code = NotFound desc = could not find container \"a868d42e6fa1c098fc9d1583f04dca38e0be60a59a7c0762d581b1ca1d12bbda\": container with ID starting with a868d42e6fa1c098fc9d1583f04dca38e0be60a59a7c0762d581b1ca1d12bbda not found: ID does not exist" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.835606 4721 scope.go:117] "RemoveContainer" containerID="3c6328f8519642aa83a494e07195a10428397045d3b8562babcc3bd7fa755d16" Dec 02 12:14:23 crc kubenswrapper[4721]: E1202 12:14:23.835806 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c6328f8519642aa83a494e07195a10428397045d3b8562babcc3bd7fa755d16\": container with ID starting with 3c6328f8519642aa83a494e07195a10428397045d3b8562babcc3bd7fa755d16 not found: ID does not exist" containerID="3c6328f8519642aa83a494e07195a10428397045d3b8562babcc3bd7fa755d16" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.835832 4721 
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.835847 4721 scope.go:117] "RemoveContainer" containerID="afc24a08241150352ee3448002c09defeace534d3cdb24c6c11e113df2658865"
Dec 02 12:14:23 crc kubenswrapper[4721]: E1202 12:14:23.836252 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afc24a08241150352ee3448002c09defeace534d3cdb24c6c11e113df2658865\": container with ID starting with afc24a08241150352ee3448002c09defeace534d3cdb24c6c11e113df2658865 not found: ID does not exist" containerID="afc24a08241150352ee3448002c09defeace534d3cdb24c6c11e113df2658865"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.836286 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afc24a08241150352ee3448002c09defeace534d3cdb24c6c11e113df2658865"} err="failed to get container status \"afc24a08241150352ee3448002c09defeace534d3cdb24c6c11e113df2658865\": rpc error: code = NotFound desc = could not find container \"afc24a08241150352ee3448002c09defeace534d3cdb24c6c11e113df2658865\": container with ID starting with afc24a08241150352ee3448002c09defeace534d3cdb24c6c11e113df2658865 not found: ID does not exist"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.836302 4721 scope.go:117] "RemoveContainer" containerID="c65cb5a856a1c5496889424f188ddde01dad3f8c58c03eaf04981bec7fcfa362"
Dec 02 12:14:23 crc kubenswrapper[4721]: E1202 12:14:23.836702 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c65cb5a856a1c5496889424f188ddde01dad3f8c58c03eaf04981bec7fcfa362\": container with ID starting with c65cb5a856a1c5496889424f188ddde01dad3f8c58c03eaf04981bec7fcfa362 not found: ID does not exist" containerID="c65cb5a856a1c5496889424f188ddde01dad3f8c58c03eaf04981bec7fcfa362"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.836723 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c65cb5a856a1c5496889424f188ddde01dad3f8c58c03eaf04981bec7fcfa362"} err="failed to get container status \"c65cb5a856a1c5496889424f188ddde01dad3f8c58c03eaf04981bec7fcfa362\": rpc error: code = NotFound desc = could not find container \"c65cb5a856a1c5496889424f188ddde01dad3f8c58c03eaf04981bec7fcfa362\": container with ID starting with c65cb5a856a1c5496889424f188ddde01dad3f8c58c03eaf04981bec7fcfa362 not found: ID does not exist"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.836738 4721 scope.go:117] "RemoveContainer" containerID="f33070308e90b30d9cdd71f45cf5283d4f08d7eb1d267b4d5c3f4fdb3b88e7c9"
Dec 02 12:14:23 crc kubenswrapper[4721]: E1202 12:14:23.837005 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f33070308e90b30d9cdd71f45cf5283d4f08d7eb1d267b4d5c3f4fdb3b88e7c9\": container with ID starting with f33070308e90b30d9cdd71f45cf5283d4f08d7eb1d267b4d5c3f4fdb3b88e7c9 not found: ID does not exist" containerID="f33070308e90b30d9cdd71f45cf5283d4f08d7eb1d267b4d5c3f4fdb3b88e7c9"
not exist" containerID="f33070308e90b30d9cdd71f45cf5283d4f08d7eb1d267b4d5c3f4fdb3b88e7c9" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.837030 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f33070308e90b30d9cdd71f45cf5283d4f08d7eb1d267b4d5c3f4fdb3b88e7c9"} err="failed to get container status \"f33070308e90b30d9cdd71f45cf5283d4f08d7eb1d267b4d5c3f4fdb3b88e7c9\": rpc error: code = NotFound desc = could not find container \"f33070308e90b30d9cdd71f45cf5283d4f08d7eb1d267b4d5c3f4fdb3b88e7c9\": container with ID starting with f33070308e90b30d9cdd71f45cf5283d4f08d7eb1d267b4d5c3f4fdb3b88e7c9 not found: ID does not exist" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.837046 4721 scope.go:117] "RemoveContainer" containerID="9a14a12fa0925fa7c29432aa1ea91ad4238ba9bbcd8d6c303a4a6e23d98d992c" Dec 02 12:14:23 crc kubenswrapper[4721]: E1202 12:14:23.837352 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a14a12fa0925fa7c29432aa1ea91ad4238ba9bbcd8d6c303a4a6e23d98d992c\": container with ID starting with 9a14a12fa0925fa7c29432aa1ea91ad4238ba9bbcd8d6c303a4a6e23d98d992c not found: ID does not exist" containerID="9a14a12fa0925fa7c29432aa1ea91ad4238ba9bbcd8d6c303a4a6e23d98d992c" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.837467 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a14a12fa0925fa7c29432aa1ea91ad4238ba9bbcd8d6c303a4a6e23d98d992c"} err="failed to get container status \"9a14a12fa0925fa7c29432aa1ea91ad4238ba9bbcd8d6c303a4a6e23d98d992c\": rpc error: code = NotFound desc = could not find container \"9a14a12fa0925fa7c29432aa1ea91ad4238ba9bbcd8d6c303a4a6e23d98d992c\": container with ID starting with 9a14a12fa0925fa7c29432aa1ea91ad4238ba9bbcd8d6c303a4a6e23d98d992c not found: ID does not exist" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.837545 4721 scope.go:117] "RemoveContainer" containerID="fa37ab877067b4bd0837ef51174cd21450a84a3f3c933b37297b14c21c679786" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.852955 4721 scope.go:117] "RemoveContainer" containerID="d0135e82a83e7dd432d606cd974ccad81f99a7740082c819d27aad70f20df54c" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.868360 4721 scope.go:117] "RemoveContainer" containerID="a42959157e2fd85ce825bcf1a4e95457615a57ce555725522b47bd8f26bf7ac9" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.882645 4721 scope.go:117] "RemoveContainer" containerID="a325af09c6f45c8e41a6dbcb452cd9644b277e3ad497298b0240f6c9b7e522ef" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.898352 4721 scope.go:117] "RemoveContainer" containerID="ea3dc07b828742ed47d9ee04d67d8b69338eda5d1113da2c4460899c2c5acb37" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.916260 4721 scope.go:117] "RemoveContainer" containerID="69ab3857ba197ddd18cda3cf08e05c79f4925f822cd01d76d13743798385b8fb" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.941858 4721 scope.go:117] "RemoveContainer" containerID="97ca0d88fc5a289cee8c0ab0310c5c97915d5ac70a170da304db86445523758a" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.955886 4721 scope.go:117] "RemoveContainer" containerID="5a42824206ed29faa1e2a17dae669ab6e5d3adea29dd3b5a786383a72fc3adf1" Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.968052 4721 scope.go:117] "RemoveContainer" containerID="38828a33ce455be19b0e43568db4ffd83b44c1da254ef1943cc620bb9f4f65d2" Dec 02 12:14:23 crc 
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.981311 4721 scope.go:117] "RemoveContainer" containerID="cb4782790aa9a69b2a20327fa4e51e5f4a5f9461eb69c419341fc62846ef8318"
Dec 02 12:14:23 crc kubenswrapper[4721]: I1202 12:14:23.996111 4721 scope.go:117] "RemoveContainer" containerID="355bdf529f7f19ac1a7928e73f70afc680d14b7084acc2ed4a8664677b5a38fc"
Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.010266 4721 scope.go:117] "RemoveContainer" containerID="5ddea8519471235cd203737898d4a84a22947a858c4161d0cdbc538395b90d59"
Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.028127 4721 scope.go:117] "RemoveContainer" containerID="18523f1db324b880b4580224bf2ad72c03020e5940c91f1f4e0d34781b24bbd0"
Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.041828 4721 scope.go:117] "RemoveContainer" containerID="b17edaa3291f4fcc1070e11c3ba8b7ea2370f6138a9e013896a7afcccc514df5"
Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.054941 4721 scope.go:117] "RemoveContainer" containerID="1da308cd6e4b57955810b3f16b8850390d22406c5c67f5271af2bfcb6e30b8d5"
Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.066959 4721 scope.go:117] "RemoveContainer" containerID="fa37ab877067b4bd0837ef51174cd21450a84a3f3c933b37297b14c21c679786"
Dec 02 12:14:24 crc kubenswrapper[4721]: E1202 12:14:24.067255 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa37ab877067b4bd0837ef51174cd21450a84a3f3c933b37297b14c21c679786\": container with ID starting with fa37ab877067b4bd0837ef51174cd21450a84a3f3c933b37297b14c21c679786 not found: ID does not exist" containerID="fa37ab877067b4bd0837ef51174cd21450a84a3f3c933b37297b14c21c679786"
Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.067292 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa37ab877067b4bd0837ef51174cd21450a84a3f3c933b37297b14c21c679786"} err="failed to get container status \"fa37ab877067b4bd0837ef51174cd21450a84a3f3c933b37297b14c21c679786\": rpc error: code = NotFound desc = could not find container \"fa37ab877067b4bd0837ef51174cd21450a84a3f3c933b37297b14c21c679786\": container with ID starting with fa37ab877067b4bd0837ef51174cd21450a84a3f3c933b37297b14c21c679786 not found: ID does not exist"
Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.067315 4721 scope.go:117] "RemoveContainer" containerID="d0135e82a83e7dd432d606cd974ccad81f99a7740082c819d27aad70f20df54c"
Dec 02 12:14:24 crc kubenswrapper[4721]: E1202 12:14:24.067624 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0135e82a83e7dd432d606cd974ccad81f99a7740082c819d27aad70f20df54c\": container with ID starting with d0135e82a83e7dd432d606cd974ccad81f99a7740082c819d27aad70f20df54c not found: ID does not exist" containerID="d0135e82a83e7dd432d606cd974ccad81f99a7740082c819d27aad70f20df54c"
Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.067662 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0135e82a83e7dd432d606cd974ccad81f99a7740082c819d27aad70f20df54c"} err="failed to get container status \"d0135e82a83e7dd432d606cd974ccad81f99a7740082c819d27aad70f20df54c\": rpc error: code = NotFound desc = could not find container \"d0135e82a83e7dd432d606cd974ccad81f99a7740082c819d27aad70f20df54c\": container with ID starting with d0135e82a83e7dd432d606cd974ccad81f99a7740082c819d27aad70f20df54c not found: ID does not exist"
Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.067689 4721 scope.go:117] "RemoveContainer" containerID="a42959157e2fd85ce825bcf1a4e95457615a57ce555725522b47bd8f26bf7ac9"
Dec 02 12:14:24 crc kubenswrapper[4721]: E1202 12:14:24.067955 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a42959157e2fd85ce825bcf1a4e95457615a57ce555725522b47bd8f26bf7ac9\": container with ID starting with a42959157e2fd85ce825bcf1a4e95457615a57ce555725522b47bd8f26bf7ac9 not found: ID does not exist" containerID="a42959157e2fd85ce825bcf1a4e95457615a57ce555725522b47bd8f26bf7ac9"
Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.067984 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a42959157e2fd85ce825bcf1a4e95457615a57ce555725522b47bd8f26bf7ac9"} err="failed to get container status \"a42959157e2fd85ce825bcf1a4e95457615a57ce555725522b47bd8f26bf7ac9\": rpc error: code = NotFound desc = could not find container \"a42959157e2fd85ce825bcf1a4e95457615a57ce555725522b47bd8f26bf7ac9\": container with ID starting with a42959157e2fd85ce825bcf1a4e95457615a57ce555725522b47bd8f26bf7ac9 not found: ID does not exist"
Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.067999 4721 scope.go:117] "RemoveContainer" containerID="a325af09c6f45c8e41a6dbcb452cd9644b277e3ad497298b0240f6c9b7e522ef"
Dec 02 12:14:24 crc kubenswrapper[4721]: E1202 12:14:24.068209 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a325af09c6f45c8e41a6dbcb452cd9644b277e3ad497298b0240f6c9b7e522ef\": container with ID starting with a325af09c6f45c8e41a6dbcb452cd9644b277e3ad497298b0240f6c9b7e522ef not found: ID does not exist" containerID="a325af09c6f45c8e41a6dbcb452cd9644b277e3ad497298b0240f6c9b7e522ef"
Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.068237 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a325af09c6f45c8e41a6dbcb452cd9644b277e3ad497298b0240f6c9b7e522ef"} err="failed to get container status \"a325af09c6f45c8e41a6dbcb452cd9644b277e3ad497298b0240f6c9b7e522ef\": rpc error: code = NotFound desc = could not find container \"a325af09c6f45c8e41a6dbcb452cd9644b277e3ad497298b0240f6c9b7e522ef\": container with ID starting with a325af09c6f45c8e41a6dbcb452cd9644b277e3ad497298b0240f6c9b7e522ef not found: ID does not exist"
Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.068254 4721 scope.go:117] "RemoveContainer" containerID="ea3dc07b828742ed47d9ee04d67d8b69338eda5d1113da2c4460899c2c5acb37"
Dec 02 12:14:24 crc kubenswrapper[4721]: E1202 12:14:24.068464 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea3dc07b828742ed47d9ee04d67d8b69338eda5d1113da2c4460899c2c5acb37\": container with ID starting with ea3dc07b828742ed47d9ee04d67d8b69338eda5d1113da2c4460899c2c5acb37 not found: ID does not exist" containerID="ea3dc07b828742ed47d9ee04d67d8b69338eda5d1113da2c4460899c2c5acb37"
Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.068488 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea3dc07b828742ed47d9ee04d67d8b69338eda5d1113da2c4460899c2c5acb37"} err="failed to get container status \"ea3dc07b828742ed47d9ee04d67d8b69338eda5d1113da2c4460899c2c5acb37\": rpc error: code = NotFound desc = could not find container \"ea3dc07b828742ed47d9ee04d67d8b69338eda5d1113da2c4460899c2c5acb37\": container with ID starting with ea3dc07b828742ed47d9ee04d67d8b69338eda5d1113da2c4460899c2c5acb37 not found: ID does not exist"
\"ea3dc07b828742ed47d9ee04d67d8b69338eda5d1113da2c4460899c2c5acb37\": container with ID starting with ea3dc07b828742ed47d9ee04d67d8b69338eda5d1113da2c4460899c2c5acb37 not found: ID does not exist" Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.068501 4721 scope.go:117] "RemoveContainer" containerID="69ab3857ba197ddd18cda3cf08e05c79f4925f822cd01d76d13743798385b8fb" Dec 02 12:14:24 crc kubenswrapper[4721]: E1202 12:14:24.068777 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69ab3857ba197ddd18cda3cf08e05c79f4925f822cd01d76d13743798385b8fb\": container with ID starting with 69ab3857ba197ddd18cda3cf08e05c79f4925f822cd01d76d13743798385b8fb not found: ID does not exist" containerID="69ab3857ba197ddd18cda3cf08e05c79f4925f822cd01d76d13743798385b8fb" Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.068794 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69ab3857ba197ddd18cda3cf08e05c79f4925f822cd01d76d13743798385b8fb"} err="failed to get container status \"69ab3857ba197ddd18cda3cf08e05c79f4925f822cd01d76d13743798385b8fb\": rpc error: code = NotFound desc = could not find container \"69ab3857ba197ddd18cda3cf08e05c79f4925f822cd01d76d13743798385b8fb\": container with ID starting with 69ab3857ba197ddd18cda3cf08e05c79f4925f822cd01d76d13743798385b8fb not found: ID does not exist" Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.068806 4721 scope.go:117] "RemoveContainer" containerID="82622747e6978b5288a6f64a0f89c622a9c67f282e2129f1c52ace5096950be4" Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.851907 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" path="/var/lib/kubelet/pods/54394d6a-eb39-44ab-b5d3-a38a393991a9/volumes" Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.854275 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" path="/var/lib/kubelet/pods/8bf92b26-9c78-4bde-8b52-56263859b9fb/volumes" Dec 02 12:14:24 crc kubenswrapper[4721]: I1202 12:14:24.857067 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8f4669a-6b01-451f-91cc-2786303375db" path="/var/lib/kubelet/pods/b8f4669a-6b01-451f-91cc-2786303375db/volumes" Dec 02 12:14:25 crc kubenswrapper[4721]: I1202 12:14:25.562316 4721 generic.go:334] "Generic (PLEG): container finished" podID="bc7a8167-f9eb-4d3b-8e66-c585f6e2e448" containerID="9b8bd65c3c40a5343d66d721a7cb13c30d15d3e5e0e8dd765f2d0d657cf3a174" exitCode=0 Dec 02 12:14:25 crc kubenswrapper[4721]: I1202 12:14:25.562370 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-22p4n" event={"ID":"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448","Type":"ContainerDied","Data":"9b8bd65c3c40a5343d66d721a7cb13c30d15d3e5e0e8dd765f2d0d657cf3a174"} Dec 02 12:14:25 crc kubenswrapper[4721]: I1202 12:14:25.703121 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 12:14:25 crc kubenswrapper[4721]: I1202 12:14:25.703180 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.570646 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571279 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571294 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571310 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571319 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571331 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="container-auditor" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571356 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="container-auditor" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571375 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571382 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571389 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-reaper" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571397 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-reaper" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571409 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-server" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571415 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-server" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571428 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-auditor" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571434 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-auditor" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571449 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-expirer" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571458 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-expirer" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571469 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-server"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571477 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-server"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571489 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-auditor"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571497 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-auditor"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571506 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="container-replicator"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571514 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="container-replicator"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571522 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="container-server"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571530 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="container-server"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571540 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="swift-recon-cron"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571547 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="swift-recon-cron"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571561 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-expirer"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571567 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-expirer"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571578 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-server"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571585 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-server"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571595 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-server"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571602 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-server"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571610 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-reaper"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571617 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-reaper"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571625 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="rsync"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571632 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="rsync"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571644 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-server"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571652 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-server"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571664 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="container-replicator"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571671 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="container-replicator"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571683 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="container-updater"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571691 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="container-updater"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571715 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-auditor"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571723 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-auditor"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571732 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-auditor"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571740 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-auditor"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571748 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-auditor"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571755 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-auditor"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571764 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-auditor"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571781 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-auditor"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571793 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-updater"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571801 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-updater"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571814 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-server"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571821 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-server"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571829 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-auditor"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571836 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-auditor"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571846 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="rsync"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571853 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="rsync"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571863 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="container-auditor"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571870 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="container-auditor"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571881 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-replicator"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571890 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-replicator"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571901 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="swift-recon-cron"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571910 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="swift-recon-cron"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571924 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-updater"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571930 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-updater"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571938 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-replicator"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571945 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-replicator"
Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571955 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-replicator"
Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571962 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-replicator"
containerName="object-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571971 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-updater" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571979 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-updater" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.571988 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.571996 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.572007 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="container-server" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572015 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="container-server" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.572027 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="rsync" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572035 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="rsync" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.572044 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-updater" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572051 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-updater" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.572064 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-server" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572071 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-server" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.572082 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-reaper" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572089 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-reaper" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.572098 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="swift-recon-cron" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572106 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="swift-recon-cron" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.572114 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="container-updater" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572121 4721 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="container-updater" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.572129 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-expirer" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572136 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-expirer" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572303 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-server" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572323 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-reaper" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572333 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572361 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572372 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="container-updater" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572381 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-server" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572390 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="container-auditor" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572398 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-updater" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572409 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-auditor" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572419 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-auditor" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572427 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-reaper" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572441 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="container-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572450 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572476 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-updater" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572485 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-server" Dec 02 12:14:26 crc 
kubenswrapper[4721]: I1202 12:14:26.572496 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-auditor" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572508 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-updater" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572518 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="swift-recon-cron" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572528 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-updater" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572538 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="rsync" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572547 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="container-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572554 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572562 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572567 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="container-auditor" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572574 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="container-server" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572584 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="account-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572591 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="container-server" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572597 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-auditor" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572605 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="swift-recon-cron" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572615 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="account-auditor" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572621 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-replicator" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572641 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="container-updater" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572650 4721 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-reaper" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572655 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-auditor" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572662 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-expirer" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572669 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-server" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572675 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-auditor" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572682 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="rsync" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572689 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="object-server" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572695 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="object-expirer" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572702 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="object-expirer" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572710 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="swift-recon-cron" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572726 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="54394d6a-eb39-44ab-b5d3-a38a393991a9" containerName="container-server" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572734 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf92b26-9c78-4bde-8b52-56263859b9fb" containerName="rsync" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.572742 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f4669a-6b01-451f-91cc-2786303375db" containerName="account-server" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.576255 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-22p4n" event={"ID":"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448","Type":"ContainerStarted","Data":"b0ee22269ab003aa5d3374488f0a8ac678b6e5f54629d8b808108df7f7c803e9"} Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.576420 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.580083 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-conf" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.580196 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-storage-config-data" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.580301 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-swift-dockercfg-pzb6z" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.582132 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-files" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.602027 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.689796 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6db5858b-86d7-4b98-8d7f-26317521edcd-lock\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.689874 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxm4w\" (UniqueName: \"kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-kube-api-access-gxm4w\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.689940 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.689966 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6db5858b-86d7-4b98-8d7f-26317521edcd-cache\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.690002 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.791503 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.791578 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6db5858b-86d7-4b98-8d7f-26317521edcd-lock\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc 
kubenswrapper[4721]: I1202 12:14:26.791611 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxm4w\" (UniqueName: \"kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-kube-api-access-gxm4w\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.791655 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.791674 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6db5858b-86d7-4b98-8d7f-26317521edcd-cache\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.792119 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6db5858b-86d7-4b98-8d7f-26317521edcd-cache\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.792219 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.792232 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:14:26 crc kubenswrapper[4721]: E1202 12:14:26.792271 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift podName:6db5858b-86d7-4b98-8d7f-26317521edcd nodeName:}" failed. No retries permitted until 2025-12-02 12:14:27.292254564 +0000 UTC m=+1397.030804913 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift") pod "swift-storage-0" (UID: "6db5858b-86d7-4b98-8d7f-26317521edcd") : configmap "swift-ring-files" not found Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.792668 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6db5858b-86d7-4b98-8d7f-26317521edcd-lock\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.793202 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") device mount path \"/mnt/openstack/pv03\"" pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.816257 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.819024 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxm4w\" (UniqueName: \"kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-kube-api-access-gxm4w\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.917149 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-22p4n" podStartSLOduration=2.064472617 podStartE2EDuration="4.917128271s" podCreationTimestamp="2025-12-02 12:14:22 +0000 UTC" firstStartedPulling="2025-12-02 12:14:23.525550723 +0000 UTC m=+1393.264101072" lastFinishedPulling="2025-12-02 12:14:26.378206367 +0000 UTC m=+1396.116756726" observedRunningTime="2025-12-02 12:14:26.669254945 +0000 UTC m=+1396.407805294" watchObservedRunningTime="2025-12-02 12:14:26.917128271 +0000 UTC m=+1396.655678620" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.922579 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-l6w9c"] Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.923353 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.926669 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.931251 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.931286 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-proxy-config-data" Dec 02 12:14:26 crc kubenswrapper[4721]: I1202 12:14:26.948796 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-l6w9c"] Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.097162 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-etc-swift\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.097207 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-dispersionconf\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.097243 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-swiftconf\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.097273 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9z62g\" (UniqueName: \"kubernetes.io/projected/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-kube-api-access-9z62g\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.097308 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-ring-data-devices\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.097337 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-scripts\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.192068 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb"] Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.193068 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.198223 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-scripts\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.198511 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-etc-swift\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.198593 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-dispersionconf\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.198708 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-swiftconf\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.198996 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-scripts\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.199060 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-etc-swift\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.198950 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9z62g\" (UniqueName: \"kubernetes.io/projected/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-kube-api-access-9z62g\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.199518 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-ring-data-devices\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.200049 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-ring-data-devices\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" 
Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.203169 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-swiftconf\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.203235 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-dispersionconf\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.209409 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb"] Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.221514 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9z62g\" (UniqueName: \"kubernetes.io/projected/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-kube-api-access-9z62g\") pod \"swift-ring-rebalance-l6w9c\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") " pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.242674 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.300819 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efcd5748-285b-4688-a236-ce8ac1a5eac2-log-httpd\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.300877 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6q76\" (UniqueName: \"kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-kube-api-access-w6q76\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.300913 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efcd5748-285b-4688-a236-ce8ac1a5eac2-run-httpd\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.300952 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.301032 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc 
kubenswrapper[4721]: I1202 12:14:27.301051 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efcd5748-285b-4688-a236-ce8ac1a5eac2-config-data\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc kubenswrapper[4721]: E1202 12:14:27.301228 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:14:27 crc kubenswrapper[4721]: E1202 12:14:27.301259 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:14:27 crc kubenswrapper[4721]: E1202 12:14:27.301302 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift podName:6db5858b-86d7-4b98-8d7f-26317521edcd nodeName:}" failed. No retries permitted until 2025-12-02 12:14:28.301289098 +0000 UTC m=+1398.039839447 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift") pod "swift-storage-0" (UID: "6db5858b-86d7-4b98-8d7f-26317521edcd") : configmap "swift-ring-files" not found Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.402416 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efcd5748-285b-4688-a236-ce8ac1a5eac2-run-httpd\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.402884 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efcd5748-285b-4688-a236-ce8ac1a5eac2-run-httpd\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.402996 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.403034 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efcd5748-285b-4688-a236-ce8ac1a5eac2-config-data\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.403082 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efcd5748-285b-4688-a236-ce8ac1a5eac2-log-httpd\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.403185 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6q76\" (UniqueName: 
\"kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-kube-api-access-w6q76\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc kubenswrapper[4721]: E1202 12:14:27.403124 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:14:27 crc kubenswrapper[4721]: E1202 12:14:27.403239 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb: configmap "swift-ring-files" not found Dec 02 12:14:27 crc kubenswrapper[4721]: E1202 12:14:27.403294 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift podName:efcd5748-285b-4688-a236-ce8ac1a5eac2 nodeName:}" failed. No retries permitted until 2025-12-02 12:14:27.90327526 +0000 UTC m=+1397.641825609 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift") pod "swift-proxy-b65ff67f9-p72vb" (UID: "efcd5748-285b-4688-a236-ce8ac1a5eac2") : configmap "swift-ring-files" not found Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.403413 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efcd5748-285b-4688-a236-ce8ac1a5eac2-log-httpd\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.412537 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efcd5748-285b-4688-a236-ce8ac1a5eac2-config-data\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.439178 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6q76\" (UniqueName: \"kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-kube-api-access-w6q76\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.484036 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-l6w9c"] Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.583987 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" event={"ID":"e1fe9d21-19eb-4f82-a953-4dc4284bedf5","Type":"ContainerStarted","Data":"98a01574e289b53433b63631a6db28766e620d06cc885c75075a90370a89d9ac"} Dec 02 12:14:27 crc kubenswrapper[4721]: I1202 12:14:27.911751 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:27 crc kubenswrapper[4721]: E1202 12:14:27.911937 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:14:27 crc kubenswrapper[4721]: E1202 
12:14:27.912133 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb: configmap "swift-ring-files" not found Dec 02 12:14:27 crc kubenswrapper[4721]: E1202 12:14:27.912197 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift podName:efcd5748-285b-4688-a236-ce8ac1a5eac2 nodeName:}" failed. No retries permitted until 2025-12-02 12:14:28.912177 +0000 UTC m=+1398.650727349 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift") pod "swift-proxy-b65ff67f9-p72vb" (UID: "efcd5748-285b-4688-a236-ce8ac1a5eac2") : configmap "swift-ring-files" not found Dec 02 12:14:28 crc kubenswrapper[4721]: I1202 12:14:28.091795 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:28 crc kubenswrapper[4721]: I1202 12:14:28.091856 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:28 crc kubenswrapper[4721]: I1202 12:14:28.135692 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:28 crc kubenswrapper[4721]: I1202 12:14:28.316771 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:28 crc kubenswrapper[4721]: E1202 12:14:28.317134 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:14:28 crc kubenswrapper[4721]: E1202 12:14:28.317230 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:14:28 crc kubenswrapper[4721]: E1202 12:14:28.317331 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift podName:6db5858b-86d7-4b98-8d7f-26317521edcd nodeName:}" failed. No retries permitted until 2025-12-02 12:14:30.317316773 +0000 UTC m=+1400.055867122 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift") pod "swift-storage-0" (UID: "6db5858b-86d7-4b98-8d7f-26317521edcd") : configmap "swift-ring-files" not found Dec 02 12:14:28 crc kubenswrapper[4721]: I1202 12:14:28.593075 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" event={"ID":"e1fe9d21-19eb-4f82-a953-4dc4284bedf5","Type":"ContainerStarted","Data":"20751d56bae21081ffb6f6e3c04b3159a9de2e729362ea5a49ab32939dbedc2c"} Dec 02 12:14:28 crc kubenswrapper[4721]: I1202 12:14:28.635794 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:28 crc kubenswrapper[4721]: I1202 12:14:28.657448 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" podStartSLOduration=2.657417842 podStartE2EDuration="2.657417842s" podCreationTimestamp="2025-12-02 12:14:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:14:28.610190777 +0000 UTC m=+1398.348741126" watchObservedRunningTime="2025-12-02 12:14:28.657417842 +0000 UTC m=+1398.395968191" Dec 02 12:14:28 crc kubenswrapper[4721]: I1202 12:14:28.927008 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:28 crc kubenswrapper[4721]: E1202 12:14:28.927190 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:14:28 crc kubenswrapper[4721]: E1202 12:14:28.927211 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb: configmap "swift-ring-files" not found Dec 02 12:14:28 crc kubenswrapper[4721]: E1202 12:14:28.927260 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift podName:efcd5748-285b-4688-a236-ce8ac1a5eac2 nodeName:}" failed. No retries permitted until 2025-12-02 12:14:30.927242676 +0000 UTC m=+1400.665793035 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift") pod "swift-proxy-b65ff67f9-p72vb" (UID: "efcd5748-285b-4688-a236-ce8ac1a5eac2") : configmap "swift-ring-files" not found Dec 02 12:14:29 crc kubenswrapper[4721]: I1202 12:14:29.344577 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8ww9z"] Dec 02 12:14:30 crc kubenswrapper[4721]: I1202 12:14:30.346203 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:30 crc kubenswrapper[4721]: E1202 12:14:30.346436 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:14:30 crc kubenswrapper[4721]: E1202 12:14:30.346466 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:14:30 crc kubenswrapper[4721]: E1202 12:14:30.346522 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift podName:6db5858b-86d7-4b98-8d7f-26317521edcd nodeName:}" failed. No retries permitted until 2025-12-02 12:14:34.346505605 +0000 UTC m=+1404.085055954 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift") pod "swift-storage-0" (UID: "6db5858b-86d7-4b98-8d7f-26317521edcd") : configmap "swift-ring-files" not found Dec 02 12:14:30 crc kubenswrapper[4721]: I1202 12:14:30.609889 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8ww9z" podUID="1f50dc76-abc7-4138-aee1-6c83241c363e" containerName="registry-server" containerID="cri-o://7038db36a71f3b31bdfad66d49d35554533eb65d1aa76f52bf5d9e884e188f71" gracePeriod=2 Dec 02 12:14:30 crc kubenswrapper[4721]: I1202 12:14:30.954895 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:30 crc kubenswrapper[4721]: E1202 12:14:30.955097 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:14:30 crc kubenswrapper[4721]: E1202 12:14:30.955126 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb: configmap "swift-ring-files" not found Dec 02 12:14:30 crc kubenswrapper[4721]: E1202 12:14:30.955185 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift podName:efcd5748-285b-4688-a236-ce8ac1a5eac2 nodeName:}" failed. No retries permitted until 2025-12-02 12:14:34.955168549 +0000 UTC m=+1404.693718898 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift") pod "swift-proxy-b65ff67f9-p72vb" (UID: "efcd5748-285b-4688-a236-ce8ac1a5eac2") : configmap "swift-ring-files" not found Dec 02 12:14:31 crc kubenswrapper[4721]: I1202 12:14:31.618084 4721 generic.go:334] "Generic (PLEG): container finished" podID="1f50dc76-abc7-4138-aee1-6c83241c363e" containerID="7038db36a71f3b31bdfad66d49d35554533eb65d1aa76f52bf5d9e884e188f71" exitCode=0 Dec 02 12:14:31 crc kubenswrapper[4721]: I1202 12:14:31.618140 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8ww9z" event={"ID":"1f50dc76-abc7-4138-aee1-6c83241c363e","Type":"ContainerDied","Data":"7038db36a71f3b31bdfad66d49d35554533eb65d1aa76f52bf5d9e884e188f71"} Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.087201 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.270798 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f50dc76-abc7-4138-aee1-6c83241c363e-catalog-content\") pod \"1f50dc76-abc7-4138-aee1-6c83241c363e\" (UID: \"1f50dc76-abc7-4138-aee1-6c83241c363e\") " Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.270858 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrbb6\" (UniqueName: \"kubernetes.io/projected/1f50dc76-abc7-4138-aee1-6c83241c363e-kube-api-access-nrbb6\") pod \"1f50dc76-abc7-4138-aee1-6c83241c363e\" (UID: \"1f50dc76-abc7-4138-aee1-6c83241c363e\") " Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.270924 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f50dc76-abc7-4138-aee1-6c83241c363e-utilities\") pod \"1f50dc76-abc7-4138-aee1-6c83241c363e\" (UID: \"1f50dc76-abc7-4138-aee1-6c83241c363e\") " Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.271774 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f50dc76-abc7-4138-aee1-6c83241c363e-utilities" (OuterVolumeSpecName: "utilities") pod "1f50dc76-abc7-4138-aee1-6c83241c363e" (UID: "1f50dc76-abc7-4138-aee1-6c83241c363e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.276252 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f50dc76-abc7-4138-aee1-6c83241c363e-kube-api-access-nrbb6" (OuterVolumeSpecName: "kube-api-access-nrbb6") pod "1f50dc76-abc7-4138-aee1-6c83241c363e" (UID: "1f50dc76-abc7-4138-aee1-6c83241c363e"). InnerVolumeSpecName "kube-api-access-nrbb6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.289621 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f50dc76-abc7-4138-aee1-6c83241c363e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1f50dc76-abc7-4138-aee1-6c83241c363e" (UID: "1f50dc76-abc7-4138-aee1-6c83241c363e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.372206 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f50dc76-abc7-4138-aee1-6c83241c363e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.372258 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrbb6\" (UniqueName: \"kubernetes.io/projected/1f50dc76-abc7-4138-aee1-6c83241c363e-kube-api-access-nrbb6\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.372268 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f50dc76-abc7-4138-aee1-6c83241c363e-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.627323 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8ww9z" event={"ID":"1f50dc76-abc7-4138-aee1-6c83241c363e","Type":"ContainerDied","Data":"5c357c64ef0e7d322f4800471cec72eef9b3995b387d7a90f05cb8ad069f0ff4"} Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.627390 4721 scope.go:117] "RemoveContainer" containerID="7038db36a71f3b31bdfad66d49d35554533eb65d1aa76f52bf5d9e884e188f71" Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.627425 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8ww9z" Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.656208 4721 scope.go:117] "RemoveContainer" containerID="0549eb0b59d69bad607b62e24c7aba9b8ca35b8d21376a20a3f1265f0424c762" Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.662099 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8ww9z"] Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.667644 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8ww9z"] Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.670699 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-22p4n" Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.670728 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-22p4n" Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.673538 4721 scope.go:117] "RemoveContainer" containerID="38e420cc00247380caac1a1f902b62e93571d0e18db3d38c756e158c4a49fa21" Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.708574 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-22p4n" Dec 02 12:14:32 crc kubenswrapper[4721]: I1202 12:14:32.848104 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f50dc76-abc7-4138-aee1-6c83241c363e" path="/var/lib/kubelet/pods/1f50dc76-abc7-4138-aee1-6c83241c363e/volumes" Dec 02 12:14:33 crc kubenswrapper[4721]: I1202 12:14:33.726988 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-22p4n" Dec 02 12:14:34 crc kubenswrapper[4721]: I1202 12:14:34.406276 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift\") pod \"swift-storage-0\" (UID: 
\"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:34 crc kubenswrapper[4721]: I1202 12:14:34.412741 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift\") pod \"swift-storage-0\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:34 crc kubenswrapper[4721]: I1202 12:14:34.447599 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:14:34 crc kubenswrapper[4721]: I1202 12:14:34.662592 4721 generic.go:334] "Generic (PLEG): container finished" podID="e1fe9d21-19eb-4f82-a953-4dc4284bedf5" containerID="20751d56bae21081ffb6f6e3c04b3159a9de2e729362ea5a49ab32939dbedc2c" exitCode=0 Dec 02 12:14:34 crc kubenswrapper[4721]: I1202 12:14:34.662678 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" event={"ID":"e1fe9d21-19eb-4f82-a953-4dc4284bedf5","Type":"ContainerDied","Data":"20751d56bae21081ffb6f6e3c04b3159a9de2e729362ea5a49ab32939dbedc2c"} Dec 02 12:14:34 crc kubenswrapper[4721]: I1202 12:14:34.888585 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:14:34 crc kubenswrapper[4721]: I1202 12:14:34.943868 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-22p4n"] Dec 02 12:14:35 crc kubenswrapper[4721]: I1202 12:14:35.015031 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:35 crc kubenswrapper[4721]: I1202 12:14:35.020062 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift\") pod \"swift-proxy-b65ff67f9-p72vb\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") " pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:35 crc kubenswrapper[4721]: I1202 12:14:35.066468 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" Dec 02 12:14:35 crc kubenswrapper[4721]: I1202 12:14:35.523971 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb"] Dec 02 12:14:35 crc kubenswrapper[4721]: W1202 12:14:35.548965 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podefcd5748_285b_4688_a236_ce8ac1a5eac2.slice/crio-288e16ab816234e88fe7c17562ac8f01c96b4995602641931217a15d972d2218 WatchSource:0}: Error finding container 288e16ab816234e88fe7c17562ac8f01c96b4995602641931217a15d972d2218: Status 404 returned error can't find the container with id 288e16ab816234e88fe7c17562ac8f01c96b4995602641931217a15d972d2218 Dec 02 12:14:35 crc kubenswrapper[4721]: I1202 12:14:35.671202 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"e54cab2b7d3edd263c5595f6f3d7856b8ded9f45b924ea2258f2310f0bcb78e0"} Dec 02 12:14:35 crc kubenswrapper[4721]: I1202 12:14:35.671600 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"ceba2f7ea1ea7e91acbf1eb79a5785106a1bc050744f606a1d3b56029227918e"} Dec 02 12:14:35 crc kubenswrapper[4721]: I1202 12:14:35.671611 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"2cfec86320141b34671bd8e2d97ae669ef7a39cb1960c8cbd3aa4d055ec17502"} Dec 02 12:14:35 crc kubenswrapper[4721]: I1202 12:14:35.671620 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"5c2c257e405a3ee5ab9a42fba81bc67e11d37f0db4b2fde8c43de0c19923c5d7"} Dec 02 12:14:35 crc kubenswrapper[4721]: I1202 12:14:35.671628 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"85d68ffea577b93a4624f507b238f2628cc169525e7f05e8507f26eb06472f9b"} Dec 02 12:14:35 crc kubenswrapper[4721]: I1202 12:14:35.672575 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" event={"ID":"efcd5748-285b-4688-a236-ce8ac1a5eac2","Type":"ContainerStarted","Data":"288e16ab816234e88fe7c17562ac8f01c96b4995602641931217a15d972d2218"} Dec 02 12:14:35 crc kubenswrapper[4721]: I1202 12:14:35.672733 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-22p4n" podUID="bc7a8167-f9eb-4d3b-8e66-c585f6e2e448" containerName="registry-server" containerID="cri-o://b0ee22269ab003aa5d3374488f0a8ac678b6e5f54629d8b808108df7f7c803e9" gracePeriod=2 Dec 02 12:14:35 crc kubenswrapper[4721]: I1202 12:14:35.898809 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c"
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.028169 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-dispersionconf\") pod \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") "
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.028252 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-etc-swift\") pod \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") "
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.028317 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-swiftconf\") pod \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") "
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.028389 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9z62g\" (UniqueName: \"kubernetes.io/projected/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-kube-api-access-9z62g\") pod \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") "
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.028497 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-ring-data-devices\") pod \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") "
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.028520 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-scripts\") pod \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\" (UID: \"e1fe9d21-19eb-4f82-a953-4dc4284bedf5\") "
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.029447 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "e1fe9d21-19eb-4f82-a953-4dc4284bedf5" (UID: "e1fe9d21-19eb-4f82-a953-4dc4284bedf5"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.029533 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "e1fe9d21-19eb-4f82-a953-4dc4284bedf5" (UID: "e1fe9d21-19eb-4f82-a953-4dc4284bedf5"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.036495 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-kube-api-access-9z62g" (OuterVolumeSpecName: "kube-api-access-9z62g") pod "e1fe9d21-19eb-4f82-a953-4dc4284bedf5" (UID: "e1fe9d21-19eb-4f82-a953-4dc4284bedf5"). InnerVolumeSpecName "kube-api-access-9z62g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.048999 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "e1fe9d21-19eb-4f82-a953-4dc4284bedf5" (UID: "e1fe9d21-19eb-4f82-a953-4dc4284bedf5"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.049552 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-scripts" (OuterVolumeSpecName: "scripts") pod "e1fe9d21-19eb-4f82-a953-4dc4284bedf5" (UID: "e1fe9d21-19eb-4f82-a953-4dc4284bedf5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.054087 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "e1fe9d21-19eb-4f82-a953-4dc4284bedf5" (UID: "e1fe9d21-19eb-4f82-a953-4dc4284bedf5"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.130795 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.130834 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-dispersionconf\") on node \"crc\" DevicePath \"\""
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.130850 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.130861 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-swiftconf\") on node \"crc\" DevicePath \"\""
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.130874 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9z62g\" (UniqueName: \"kubernetes.io/projected/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-kube-api-access-9z62g\") on node \"crc\" DevicePath \"\""
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.130884 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/e1fe9d21-19eb-4f82-a953-4dc4284bedf5-ring-data-devices\") on node \"crc\" DevicePath \"\""
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.683668 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" event={"ID":"efcd5748-285b-4688-a236-ce8ac1a5eac2","Type":"ContainerStarted","Data":"f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a"}
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.684064 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb"
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.684083 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb"
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.684095 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" event={"ID":"efcd5748-285b-4688-a236-ce8ac1a5eac2","Type":"ContainerStarted","Data":"064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288"}
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.685113 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c" event={"ID":"e1fe9d21-19eb-4f82-a953-4dc4284bedf5","Type":"ContainerDied","Data":"98a01574e289b53433b63631a6db28766e620d06cc885c75075a90370a89d9ac"}
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.685144 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98a01574e289b53433b63631a6db28766e620d06cc885c75075a90370a89d9ac"
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.685120 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-l6w9c"
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.689761 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"f83ef1e3f11eec89afa078c016ae6a62e6fff25cf605054ac2689db17732cf00"}
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.689795 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"337a09f75ad47f99ff3cbbe394913ec228ec14745401e2d14a462b4ed81e8cfa"}
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.689810 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"71d647bca013c32f1eaa51f96cd2319fdcb6c8a80ef7e365b131af59699fe5e1"}
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.689821 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"3b1fe7983cbd31d453c7a672e66adcc08d3417de0998a8a9df68e3bafe3a87b2"}
Dec 02 12:14:36 crc kubenswrapper[4721]: I1202 12:14:36.713893 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" podStartSLOduration=9.713874932 podStartE2EDuration="9.713874932s" podCreationTimestamp="2025-12-02 12:14:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:14:36.713597643 +0000 UTC m=+1406.452148012" watchObservedRunningTime="2025-12-02 12:14:36.713874932 +0000 UTC m=+1406.452425281"
Dec 02 12:14:37 crc kubenswrapper[4721]: I1202 12:14:37.726173 4721 generic.go:334] "Generic (PLEG): container finished" podID="bc7a8167-f9eb-4d3b-8e66-c585f6e2e448" containerID="b0ee22269ab003aa5d3374488f0a8ac678b6e5f54629d8b808108df7f7c803e9" exitCode=0
Dec 02 12:14:37 crc kubenswrapper[4721]: I1202 12:14:37.726334 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-22p4n" event={"ID":"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448","Type":"ContainerDied","Data":"b0ee22269ab003aa5d3374488f0a8ac678b6e5f54629d8b808108df7f7c803e9"}
Dec 02 12:14:37 crc kubenswrapper[4721]: I1202 12:14:37.755413 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"2227e89dde2f1f721e42d5121d14a9612770eb66446ca5e73b9c0fe0de9ee21a"}
Dec 02 12:14:37 crc kubenswrapper[4721]: I1202 12:14:37.755470 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"ee71c6d4d189c7994febacce98108fbd3c4e28a74cace6a8a7385f4cc18b9579"}
Dec 02 12:14:37 crc kubenswrapper[4721]: I1202 12:14:37.755484 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"2a9b50dd5e1daa512a86b757bddde61b1f2a2f487d622ef27cc897d475ae9c99"}
Dec 02 12:14:37 crc kubenswrapper[4721]: I1202 12:14:37.755496 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"7652d5f506e92d8c2f2750ef7eb1d5859d3f2c5fead70a5df6bcbd55221c7dce"}
Dec 02 12:14:37 crc kubenswrapper[4721]: I1202 12:14:37.755508 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"bef0a68830aadde51e8762ecc134301f5f369fa49cee7a58d3057dbb50d2774d"}
Dec 02 12:14:37 crc kubenswrapper[4721]: I1202 12:14:37.953291 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-22p4n"
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.055360 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-catalog-content\") pod \"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448\" (UID: \"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448\") "
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.055418 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcdfm\" (UniqueName: \"kubernetes.io/projected/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-kube-api-access-gcdfm\") pod \"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448\" (UID: \"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448\") "
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.055528 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-utilities\") pod \"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448\" (UID: \"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448\") "
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.056387 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-utilities" (OuterVolumeSpecName: "utilities") pod "bc7a8167-f9eb-4d3b-8e66-c585f6e2e448" (UID: "bc7a8167-f9eb-4d3b-8e66-c585f6e2e448"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.065490 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-kube-api-access-gcdfm" (OuterVolumeSpecName: "kube-api-access-gcdfm") pod "bc7a8167-f9eb-4d3b-8e66-c585f6e2e448" (UID: "bc7a8167-f9eb-4d3b-8e66-c585f6e2e448"). InnerVolumeSpecName "kube-api-access-gcdfm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.158600 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.158648 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcdfm\" (UniqueName: \"kubernetes.io/projected/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-kube-api-access-gcdfm\") on node \"crc\" DevicePath \"\""
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.185265 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bc7a8167-f9eb-4d3b-8e66-c585f6e2e448" (UID: "bc7a8167-f9eb-4d3b-8e66-c585f6e2e448"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.259914 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.772070 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"82522c1257a78f6fc8e38dd7b0039f98daa00a591d6efed9d2a3dfcc48f5afe3"}
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.772118 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerStarted","Data":"e86a828faebecaf964bbb3588ddfe48fe57179dc34045fc05f3374c0d59d525b"}
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.776096 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-22p4n" event={"ID":"bc7a8167-f9eb-4d3b-8e66-c585f6e2e448","Type":"ContainerDied","Data":"98d6a7173d49f593286c7d3b2823058caabae4f428241a0d96e83469f69fa6bc"}
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.776171 4721 scope.go:117] "RemoveContainer" containerID="b0ee22269ab003aa5d3374488f0a8ac678b6e5f54629d8b808108df7f7c803e9"
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.776321 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-22p4n"
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.804782 4721 scope.go:117] "RemoveContainer" containerID="9b8bd65c3c40a5343d66d721a7cb13c30d15d3e5e0e8dd765f2d0d657cf3a174"
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.855438 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-0" podStartSLOduration=13.855411716 podStartE2EDuration="13.855411716s" podCreationTimestamp="2025-12-02 12:14:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:14:38.845230681 +0000 UTC m=+1408.583781070" watchObservedRunningTime="2025-12-02 12:14:38.855411716 +0000 UTC m=+1408.593962055"
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.877047 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-22p4n"]
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.877083 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-22p4n"]
Dec 02 12:14:38 crc kubenswrapper[4721]: I1202 12:14:38.881549 4721 scope.go:117] "RemoveContainer" containerID="b796c52d41c18abf568ae62cd5db5866715992bab942b7be52a8a7fd8e5df8f1"
Dec 02 12:14:40 crc kubenswrapper[4721]: I1202 12:14:40.074309 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb"
Dec 02 12:14:40 crc kubenswrapper[4721]: I1202 12:14:40.074391 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb"
Dec 02 12:14:40 crc kubenswrapper[4721]: I1202 12:14:40.848938 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc7a8167-f9eb-4d3b-8e66-c585f6e2e448" path="/var/lib/kubelet/pods/bc7a8167-f9eb-4d3b-8e66-c585f6e2e448/volumes"
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.563855 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.564658 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-server" containerID="cri-o://5c2c257e405a3ee5ab9a42fba81bc67e11d37f0db4b2fde8c43de0c19923c5d7" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.564960 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="swift-recon-cron" containerID="cri-o://82522c1257a78f6fc8e38dd7b0039f98daa00a591d6efed9d2a3dfcc48f5afe3" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.565014 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="rsync" containerID="cri-o://e86a828faebecaf964bbb3588ddfe48fe57179dc34045fc05f3374c0d59d525b" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.565047 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-expirer" containerID="cri-o://2227e89dde2f1f721e42d5121d14a9612770eb66446ca5e73b9c0fe0de9ee21a" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.565077 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-updater" containerID="cri-o://ee71c6d4d189c7994febacce98108fbd3c4e28a74cace6a8a7385f4cc18b9579" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.565106 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-auditor" containerID="cri-o://2a9b50dd5e1daa512a86b757bddde61b1f2a2f487d622ef27cc897d475ae9c99" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.565137 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-replicator" containerID="cri-o://7652d5f506e92d8c2f2750ef7eb1d5859d3f2c5fead70a5df6bcbd55221c7dce" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.565165 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-server" containerID="cri-o://bef0a68830aadde51e8762ecc134301f5f369fa49cee7a58d3057dbb50d2774d" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.565195 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-updater" containerID="cri-o://f83ef1e3f11eec89afa078c016ae6a62e6fff25cf605054ac2689db17732cf00" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.565235 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-auditor" containerID="cri-o://337a09f75ad47f99ff3cbbe394913ec228ec14745401e2d14a462b4ed81e8cfa" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.565265 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-replicator" containerID="cri-o://71d647bca013c32f1eaa51f96cd2319fdcb6c8a80ef7e365b131af59699fe5e1" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.565293 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-server" containerID="cri-o://3b1fe7983cbd31d453c7a672e66adcc08d3417de0998a8a9df68e3bafe3a87b2" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.565320 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-reaper" containerID="cri-o://e54cab2b7d3edd263c5595f6f3d7856b8ded9f45b924ea2258f2310f0bcb78e0" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.565372 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-auditor" containerID="cri-o://ceba2f7ea1ea7e91acbf1eb79a5785106a1bc050744f606a1d3b56029227918e" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.565401 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-replicator" containerID="cri-o://2cfec86320141b34671bd8e2d97ae669ef7a39cb1960c8cbd3aa4d055ec17502" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.591400 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-l6w9c"]
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.601472 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-l6w9c"]
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.618730 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb"]
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.619025 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" podUID="efcd5748-285b-4688-a236-ce8ac1a5eac2" containerName="proxy-server" containerID="cri-o://f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.619198 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" podUID="efcd5748-285b-4688-a236-ce8ac1a5eac2" containerName="proxy-httpd" containerID="cri-o://064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288" gracePeriod=30
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821054 4721 generic.go:334] "Generic (PLEG): container finished" podID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerID="2227e89dde2f1f721e42d5121d14a9612770eb66446ca5e73b9c0fe0de9ee21a" exitCode=0
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821130 4721 generic.go:334] "Generic (PLEG): container finished" podID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerID="ee71c6d4d189c7994febacce98108fbd3c4e28a74cace6a8a7385f4cc18b9579" exitCode=0
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821139 4721 generic.go:334] "Generic (PLEG): container finished" podID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerID="2a9b50dd5e1daa512a86b757bddde61b1f2a2f487d622ef27cc897d475ae9c99" exitCode=0
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821147 4721 generic.go:334] "Generic (PLEG): container finished" podID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerID="7652d5f506e92d8c2f2750ef7eb1d5859d3f2c5fead70a5df6bcbd55221c7dce" exitCode=0
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821171 4721 generic.go:334] "Generic (PLEG): container finished" podID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerID="bef0a68830aadde51e8762ecc134301f5f369fa49cee7a58d3057dbb50d2774d" exitCode=0
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821178 4721 generic.go:334] "Generic (PLEG): container finished" podID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerID="f83ef1e3f11eec89afa078c016ae6a62e6fff25cf605054ac2689db17732cf00" exitCode=0
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821185 4721 generic.go:334] "Generic (PLEG): container finished" podID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerID="337a09f75ad47f99ff3cbbe394913ec228ec14745401e2d14a462b4ed81e8cfa" exitCode=0
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821192 4721 generic.go:334] "Generic (PLEG): container finished" podID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerID="71d647bca013c32f1eaa51f96cd2319fdcb6c8a80ef7e365b131af59699fe5e1" exitCode=0
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821199 4721 generic.go:334] "Generic (PLEG): container finished" podID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerID="e54cab2b7d3edd263c5595f6f3d7856b8ded9f45b924ea2258f2310f0bcb78e0" exitCode=0
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821205 4721 generic.go:334] "Generic (PLEG): container finished" podID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerID="ceba2f7ea1ea7e91acbf1eb79a5785106a1bc050744f606a1d3b56029227918e" exitCode=0
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821105 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"2227e89dde2f1f721e42d5121d14a9612770eb66446ca5e73b9c0fe0de9ee21a"}
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821253 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"ee71c6d4d189c7994febacce98108fbd3c4e28a74cace6a8a7385f4cc18b9579"}
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821274 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"2a9b50dd5e1daa512a86b757bddde61b1f2a2f487d622ef27cc897d475ae9c99"}
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821285 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"7652d5f506e92d8c2f2750ef7eb1d5859d3f2c5fead70a5df6bcbd55221c7dce"}
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821296 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"bef0a68830aadde51e8762ecc134301f5f369fa49cee7a58d3057dbb50d2774d"}
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821306 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"f83ef1e3f11eec89afa078c016ae6a62e6fff25cf605054ac2689db17732cf00"}
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821315 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"337a09f75ad47f99ff3cbbe394913ec228ec14745401e2d14a462b4ed81e8cfa"}
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821323 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"71d647bca013c32f1eaa51f96cd2319fdcb6c8a80ef7e365b131af59699fe5e1"}
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821332 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"e54cab2b7d3edd263c5595f6f3d7856b8ded9f45b924ea2258f2310f0bcb78e0"}
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821211 4721 generic.go:334] "Generic (PLEG): container finished" podID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerID="2cfec86320141b34671bd8e2d97ae669ef7a39cb1960c8cbd3aa4d055ec17502" exitCode=0
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821355 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"ceba2f7ea1ea7e91acbf1eb79a5785106a1bc050744f606a1d3b56029227918e"}
Dec 02 12:14:41 crc kubenswrapper[4721]: I1202 12:14:41.821391 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"2cfec86320141b34671bd8e2d97ae669ef7a39cb1960c8cbd3aa4d055ec17502"}
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.563975 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb"
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.735202 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efcd5748-285b-4688-a236-ce8ac1a5eac2-log-httpd\") pod \"efcd5748-285b-4688-a236-ce8ac1a5eac2\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") "
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.735244 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efcd5748-285b-4688-a236-ce8ac1a5eac2-config-data\") pod \"efcd5748-285b-4688-a236-ce8ac1a5eac2\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") "
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.735291 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6q76\" (UniqueName: \"kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-kube-api-access-w6q76\") pod \"efcd5748-285b-4688-a236-ce8ac1a5eac2\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") "
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.735440 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efcd5748-285b-4688-a236-ce8ac1a5eac2-run-httpd\") pod \"efcd5748-285b-4688-a236-ce8ac1a5eac2\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") "
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.735463 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift\") pod \"efcd5748-285b-4688-a236-ce8ac1a5eac2\" (UID: \"efcd5748-285b-4688-a236-ce8ac1a5eac2\") "
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.735655 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efcd5748-285b-4688-a236-ce8ac1a5eac2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "efcd5748-285b-4688-a236-ce8ac1a5eac2" (UID: "efcd5748-285b-4688-a236-ce8ac1a5eac2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.735726 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efcd5748-285b-4688-a236-ce8ac1a5eac2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "efcd5748-285b-4688-a236-ce8ac1a5eac2" (UID: "efcd5748-285b-4688-a236-ce8ac1a5eac2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.735907 4721 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efcd5748-285b-4688-a236-ce8ac1a5eac2-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.735927 4721 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efcd5748-285b-4688-a236-ce8ac1a5eac2-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.747546 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "efcd5748-285b-4688-a236-ce8ac1a5eac2" (UID: "efcd5748-285b-4688-a236-ce8ac1a5eac2"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.747631 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-kube-api-access-w6q76" (OuterVolumeSpecName: "kube-api-access-w6q76") pod "efcd5748-285b-4688-a236-ce8ac1a5eac2" (UID: "efcd5748-285b-4688-a236-ce8ac1a5eac2"). InnerVolumeSpecName "kube-api-access-w6q76". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.766969 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efcd5748-285b-4688-a236-ce8ac1a5eac2-config-data" (OuterVolumeSpecName: "config-data") pod "efcd5748-285b-4688-a236-ce8ac1a5eac2" (UID: "efcd5748-285b-4688-a236-ce8ac1a5eac2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.829786 4721 generic.go:334] "Generic (PLEG): container finished" podID="efcd5748-285b-4688-a236-ce8ac1a5eac2" containerID="f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a" exitCode=0
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.829809 4721 generic.go:334] "Generic (PLEG): container finished" podID="efcd5748-285b-4688-a236-ce8ac1a5eac2" containerID="064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288" exitCode=0
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.829859 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb"
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.829911 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" event={"ID":"efcd5748-285b-4688-a236-ce8ac1a5eac2","Type":"ContainerDied","Data":"f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a"}
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.829952 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" event={"ID":"efcd5748-285b-4688-a236-ce8ac1a5eac2","Type":"ContainerDied","Data":"064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288"}
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.829963 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb" event={"ID":"efcd5748-285b-4688-a236-ce8ac1a5eac2","Type":"ContainerDied","Data":"288e16ab816234e88fe7c17562ac8f01c96b4995602641931217a15d972d2218"}
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.829965 4721 scope.go:117] "RemoveContainer" containerID="f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a"
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.836884 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.836915 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efcd5748-285b-4688-a236-ce8ac1a5eac2-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.836932 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6q76\" (UniqueName: \"kubernetes.io/projected/efcd5748-285b-4688-a236-ce8ac1a5eac2-kube-api-access-w6q76\") on node \"crc\" DevicePath \"\""
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.841245 4721 generic.go:334] "Generic (PLEG): container finished" podID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerID="e86a828faebecaf964bbb3588ddfe48fe57179dc34045fc05f3374c0d59d525b" exitCode=0
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.841271 4721 generic.go:334] "Generic (PLEG): container finished" podID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerID="3b1fe7983cbd31d453c7a672e66adcc08d3417de0998a8a9df68e3bafe3a87b2" exitCode=0
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.841278 4721 generic.go:334] "Generic (PLEG): container finished" podID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerID="5c2c257e405a3ee5ab9a42fba81bc67e11d37f0db4b2fde8c43de0c19923c5d7" exitCode=0
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.847083 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1fe9d21-19eb-4f82-a953-4dc4284bedf5" path="/var/lib/kubelet/pods/e1fe9d21-19eb-4f82-a953-4dc4284bedf5/volumes"
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.847868 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"e86a828faebecaf964bbb3588ddfe48fe57179dc34045fc05f3374c0d59d525b"}
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.847903 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"3b1fe7983cbd31d453c7a672e66adcc08d3417de0998a8a9df68e3bafe3a87b2"}
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.847917 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"5c2c257e405a3ee5ab9a42fba81bc67e11d37f0db4b2fde8c43de0c19923c5d7"}
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.852464 4721 scope.go:117] "RemoveContainer" containerID="064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288"
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.863389 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb"]
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.868048 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-proxy-b65ff67f9-p72vb"]
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.874754 4721 scope.go:117] "RemoveContainer" containerID="f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a"
Dec 02 12:14:42 crc kubenswrapper[4721]: E1202 12:14:42.875220 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a\": container with ID starting with f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a not found: ID does not exist" containerID="f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a"
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.875265 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a"} err="failed to get container status \"f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a\": rpc error: code = NotFound desc = could not find container \"f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a\": container with ID starting with f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a not found: ID does not exist"
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.875296 4721 scope.go:117] "RemoveContainer" containerID="064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288"
Dec 02 12:14:42 crc kubenswrapper[4721]: E1202 12:14:42.875692 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288\": container with ID starting with 064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288 not found: ID does not exist" containerID="064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288"
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.875737 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288"} err="failed to get container status \"064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288\": rpc error: code = NotFound desc = could not find container \"064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288\": container with ID starting with 064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288 not found: ID does not exist"
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.875766 4721 scope.go:117] "RemoveContainer" containerID="f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a"
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.876174 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a"} err="failed to get container status \"f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a\": rpc error: code = NotFound desc = could not find container \"f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a\": container with ID starting with f367a7d7b15f755dadad4ade45d664fc281bfc84700a74d6b13ac14be03f1a2a not found: ID does not exist"
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.876195 4721 scope.go:117] "RemoveContainer" containerID="064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288"
Dec 02 12:14:42 crc kubenswrapper[4721]: I1202 12:14:42.876419 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288"} err="failed to get container status \"064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288\": rpc error: code = NotFound desc = could not find container \"064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288\": container with ID starting with 064427ce236d0cba912f9ffc4e8d58622ad094250303cdd840aa774414ab1288 not found: ID does not exist"
Dec 02 12:14:44 crc kubenswrapper[4721]: I1202 12:14:44.855599 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efcd5748-285b-4688-a236-ce8ac1a5eac2" path="/var/lib/kubelet/pods/efcd5748-285b-4688-a236-ce8ac1a5eac2/volumes"
Dec 02 12:14:55 crc kubenswrapper[4721]: I1202 12:14:55.703033 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 12:14:55 crc kubenswrapper[4721]: I1202 12:14:55.704432 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.155941 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"]
Dec 02 12:15:00 crc kubenswrapper[4721]: E1202 12:15:00.159555 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc7a8167-f9eb-4d3b-8e66-c585f6e2e448" containerName="extract-content"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.159594 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc7a8167-f9eb-4d3b-8e66-c585f6e2e448" containerName="extract-content"
Dec 02 12:15:00 crc kubenswrapper[4721]: E1202 12:15:00.159619 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f50dc76-abc7-4138-aee1-6c83241c363e" containerName="extract-utilities"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.159632 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f50dc76-abc7-4138-aee1-6c83241c363e" containerName="extract-utilities"
Dec 02 12:15:00 crc kubenswrapper[4721]: E1202 12:15:00.159655 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1fe9d21-19eb-4f82-a953-4dc4284bedf5" containerName="swift-ring-rebalance"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.159671 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1fe9d21-19eb-4f82-a953-4dc4284bedf5" containerName="swift-ring-rebalance"
Dec 02 12:15:00 crc kubenswrapper[4721]: E1202 12:15:00.159693 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f50dc76-abc7-4138-aee1-6c83241c363e" containerName="registry-server"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.159706 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f50dc76-abc7-4138-aee1-6c83241c363e" containerName="registry-server"
Dec 02 12:15:00 crc kubenswrapper[4721]: E1202 12:15:00.159726 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efcd5748-285b-4688-a236-ce8ac1a5eac2" containerName="proxy-httpd"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.159737 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="efcd5748-285b-4688-a236-ce8ac1a5eac2" containerName="proxy-httpd"
Dec 02 12:15:00 crc kubenswrapper[4721]: E1202 12:15:00.159757 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f50dc76-abc7-4138-aee1-6c83241c363e" containerName="extract-content"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.159769 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f50dc76-abc7-4138-aee1-6c83241c363e" containerName="extract-content"
Dec 02 12:15:00 crc kubenswrapper[4721]: E1202 12:15:00.159789 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efcd5748-285b-4688-a236-ce8ac1a5eac2" containerName="proxy-server"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.159801 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="efcd5748-285b-4688-a236-ce8ac1a5eac2" containerName="proxy-server"
Dec 02 12:15:00 crc kubenswrapper[4721]: E1202 12:15:00.159820 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc7a8167-f9eb-4d3b-8e66-c585f6e2e448" containerName="extract-utilities"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.159832 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc7a8167-f9eb-4d3b-8e66-c585f6e2e448" containerName="extract-utilities"
Dec 02 12:15:00 crc kubenswrapper[4721]: E1202 12:15:00.159868 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc7a8167-f9eb-4d3b-8e66-c585f6e2e448" containerName="registry-server"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.159880 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc7a8167-f9eb-4d3b-8e66-c585f6e2e448" containerName="registry-server"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.160136 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="efcd5748-285b-4688-a236-ce8ac1a5eac2" containerName="proxy-httpd"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.160167 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1fe9d21-19eb-4f82-a953-4dc4284bedf5" containerName="swift-ring-rebalance"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.160190 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f50dc76-abc7-4138-aee1-6c83241c363e" containerName="registry-server"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.160205 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="efcd5748-285b-4688-a236-ce8ac1a5eac2" containerName="proxy-server"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.160231 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc7a8167-f9eb-4d3b-8e66-c585f6e2e448" containerName="registry-server"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.161530 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.165484 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.165542 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.172876 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"]
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.255225 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j78ks\" (UniqueName: \"kubernetes.io/projected/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-kube-api-access-j78ks\") pod \"collect-profiles-29411295-kwvqt\" (UID: \"41f40b3c-9bc1-40a1-8f9f-c234185f2d98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.255465 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-secret-volume\") pod \"collect-profiles-29411295-kwvqt\" (UID: \"41f40b3c-9bc1-40a1-8f9f-c234185f2d98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.255644 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-config-volume\") pod \"collect-profiles-29411295-kwvqt\" (UID: \"41f40b3c-9bc1-40a1-8f9f-c234185f2d98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.357558 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-secret-volume\") pod \"collect-profiles-29411295-kwvqt\" (UID: \"41f40b3c-9bc1-40a1-8f9f-c234185f2d98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.357727 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-config-volume\") pod \"collect-profiles-29411295-kwvqt\" (UID: \"41f40b3c-9bc1-40a1-8f9f-c234185f2d98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.357768 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j78ks\" (UniqueName: \"kubernetes.io/projected/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-kube-api-access-j78ks\") pod \"collect-profiles-29411295-kwvqt\" (UID: \"41f40b3c-9bc1-40a1-8f9f-c234185f2d98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.359146 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-config-volume\") pod \"collect-profiles-29411295-kwvqt\" (UID: \"41f40b3c-9bc1-40a1-8f9f-c234185f2d98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.366901 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-secret-volume\") pod \"collect-profiles-29411295-kwvqt\" (UID: \"41f40b3c-9bc1-40a1-8f9f-c234185f2d98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.382940 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j78ks\" (UniqueName: \"kubernetes.io/projected/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-kube-api-access-j78ks\") pod \"collect-profiles-29411295-kwvqt\" (UID: \"41f40b3c-9bc1-40a1-8f9f-c234185f2d98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.528103 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"
Dec 02 12:15:00 crc kubenswrapper[4721]: I1202 12:15:00.733856 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"]
Dec 02 12:15:01 crc kubenswrapper[4721]: I1202 12:15:01.028437 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt" event={"ID":"41f40b3c-9bc1-40a1-8f9f-c234185f2d98","Type":"ContainerStarted","Data":"f9872482c71a46ad119392b069285a1f3b6cc04838def410139f572685a08fa6"}
Dec 02 12:15:01 crc kubenswrapper[4721]: I1202 12:15:01.028739 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt" event={"ID":"41f40b3c-9bc1-40a1-8f9f-c234185f2d98","Type":"ContainerStarted","Data":"7f0d0fd62208d522064e35bac2a0a82d4f576a0f1e087b9fee48ae0f534e791a"}
Dec 02 12:15:01 crc kubenswrapper[4721]: I1202 12:15:01.043375 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt" podStartSLOduration=1.043336657 podStartE2EDuration="1.043336657s" podCreationTimestamp="2025-12-02 12:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:15:01.040009655 +0000 UTC m=+1430.778560014" watchObservedRunningTime="2025-12-02 12:15:01.043336657 +0000 UTC m=+1430.781887006"
Dec 02 12:15:02 crc kubenswrapper[4721]: I1202 12:15:02.038153 4721 generic.go:334] "Generic (PLEG): container finished" podID="41f40b3c-9bc1-40a1-8f9f-c234185f2d98" containerID="f9872482c71a46ad119392b069285a1f3b6cc04838def410139f572685a08fa6" exitCode=0
Dec 02 12:15:02 crc kubenswrapper[4721]: I1202 12:15:02.038202 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt" event={"ID":"41f40b3c-9bc1-40a1-8f9f-c234185f2d98","Type":"ContainerDied","Data":"f9872482c71a46ad119392b069285a1f3b6cc04838def410139f572685a08fa6"}
Dec 02 12:15:03 crc kubenswrapper[4721]: I1202 12:15:03.349917 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"
Dec 02 12:15:03 crc kubenswrapper[4721]: I1202 12:15:03.500391 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-config-volume\") pod \"41f40b3c-9bc1-40a1-8f9f-c234185f2d98\" (UID: \"41f40b3c-9bc1-40a1-8f9f-c234185f2d98\") "
Dec 02 12:15:03 crc kubenswrapper[4721]: I1202 12:15:03.500457 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j78ks\" (UniqueName: \"kubernetes.io/projected/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-kube-api-access-j78ks\") pod \"41f40b3c-9bc1-40a1-8f9f-c234185f2d98\" (UID: \"41f40b3c-9bc1-40a1-8f9f-c234185f2d98\") "
Dec 02 12:15:03 crc kubenswrapper[4721]: I1202 12:15:03.500694 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-secret-volume\") pod \"41f40b3c-9bc1-40a1-8f9f-c234185f2d98\" (UID: \"41f40b3c-9bc1-40a1-8f9f-c234185f2d98\") "
Dec 02 12:15:03 crc kubenswrapper[4721]: I1202 12:15:03.500936 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-config-volume" (OuterVolumeSpecName: "config-volume") pod "41f40b3c-9bc1-40a1-8f9f-c234185f2d98" (UID: "41f40b3c-9bc1-40a1-8f9f-c234185f2d98"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:15:03 crc kubenswrapper[4721]: I1202 12:15:03.501287 4721 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-config-volume\") on node \"crc\" DevicePath \"\""
Dec 02 12:15:03 crc kubenswrapper[4721]: I1202 12:15:03.505529 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-kube-api-access-j78ks" (OuterVolumeSpecName: "kube-api-access-j78ks") pod "41f40b3c-9bc1-40a1-8f9f-c234185f2d98" (UID: "41f40b3c-9bc1-40a1-8f9f-c234185f2d98"). InnerVolumeSpecName "kube-api-access-j78ks". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:15:03 crc kubenswrapper[4721]: I1202 12:15:03.506649 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "41f40b3c-9bc1-40a1-8f9f-c234185f2d98" (UID: "41f40b3c-9bc1-40a1-8f9f-c234185f2d98"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:15:03 crc kubenswrapper[4721]: I1202 12:15:03.602104 4721 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 02 12:15:03 crc kubenswrapper[4721]: I1202 12:15:03.602514 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j78ks\" (UniqueName: \"kubernetes.io/projected/41f40b3c-9bc1-40a1-8f9f-c234185f2d98-kube-api-access-j78ks\") on node \"crc\" DevicePath \"\""
Dec 02 12:15:04 crc kubenswrapper[4721]: I1202 12:15:04.056158 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt" event={"ID":"41f40b3c-9bc1-40a1-8f9f-c234185f2d98","Type":"ContainerDied","Data":"7f0d0fd62208d522064e35bac2a0a82d4f576a0f1e087b9fee48ae0f534e791a"}
Dec 02 12:15:04 crc kubenswrapper[4721]: I1202 12:15:04.056203 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f0d0fd62208d522064e35bac2a0a82d4f576a0f1e087b9fee48ae0f534e791a"
Dec 02 12:15:04 crc kubenswrapper[4721]: I1202 12:15:04.056240 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411295-kwvqt"
Dec 02 12:15:11 crc kubenswrapper[4721]: I1202 12:15:11.977892 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.132050 4721 generic.go:334] "Generic (PLEG): container finished" podID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerID="82522c1257a78f6fc8e38dd7b0039f98daa00a591d6efed9d2a3dfcc48f5afe3" exitCode=137
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.132095 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"82522c1257a78f6fc8e38dd7b0039f98daa00a591d6efed9d2a3dfcc48f5afe3"}
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.132132 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"6db5858b-86d7-4b98-8d7f-26317521edcd","Type":"ContainerDied","Data":"85d68ffea577b93a4624f507b238f2628cc169525e7f05e8507f26eb06472f9b"}
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.132161 4721 scope.go:117] "RemoveContainer" containerID="82522c1257a78f6fc8e38dd7b0039f98daa00a591d6efed9d2a3dfcc48f5afe3"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.132160 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.152407 4721 scope.go:117] "RemoveContainer" containerID="e86a828faebecaf964bbb3588ddfe48fe57179dc34045fc05f3374c0d59d525b"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.154598 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxm4w\" (UniqueName: \"kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-kube-api-access-gxm4w\") pod \"6db5858b-86d7-4b98-8d7f-26317521edcd\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") "
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.154705 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift\") pod \"6db5858b-86d7-4b98-8d7f-26317521edcd\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") "
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.154744 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6db5858b-86d7-4b98-8d7f-26317521edcd-lock\") pod \"6db5858b-86d7-4b98-8d7f-26317521edcd\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") "
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.154777 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"6db5858b-86d7-4b98-8d7f-26317521edcd\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") "
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.154890 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6db5858b-86d7-4b98-8d7f-26317521edcd-cache\") pod \"6db5858b-86d7-4b98-8d7f-26317521edcd\" (UID: \"6db5858b-86d7-4b98-8d7f-26317521edcd\") "
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.155265 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6db5858b-86d7-4b98-8d7f-26317521edcd-lock" (OuterVolumeSpecName: "lock") pod "6db5858b-86d7-4b98-8d7f-26317521edcd" (UID: "6db5858b-86d7-4b98-8d7f-26317521edcd"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.155417 4721 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6db5858b-86d7-4b98-8d7f-26317521edcd-lock\") on node \"crc\" DevicePath \"\""
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.155612 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6db5858b-86d7-4b98-8d7f-26317521edcd-cache" (OuterVolumeSpecName: "cache") pod "6db5858b-86d7-4b98-8d7f-26317521edcd" (UID: "6db5858b-86d7-4b98-8d7f-26317521edcd"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.159918 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "6db5858b-86d7-4b98-8d7f-26317521edcd" (UID: "6db5858b-86d7-4b98-8d7f-26317521edcd"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.160090 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-kube-api-access-gxm4w" (OuterVolumeSpecName: "kube-api-access-gxm4w") pod "6db5858b-86d7-4b98-8d7f-26317521edcd" (UID: "6db5858b-86d7-4b98-8d7f-26317521edcd"). InnerVolumeSpecName "kube-api-access-gxm4w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.161559 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "swift") pod "6db5858b-86d7-4b98-8d7f-26317521edcd" (UID: "6db5858b-86d7-4b98-8d7f-26317521edcd"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.225364 4721 scope.go:117] "RemoveContainer" containerID="2227e89dde2f1f721e42d5121d14a9612770eb66446ca5e73b9c0fe0de9ee21a"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.239975 4721 scope.go:117] "RemoveContainer" containerID="ee71c6d4d189c7994febacce98108fbd3c4e28a74cace6a8a7385f4cc18b9579"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.255975 4721 scope.go:117] "RemoveContainer" containerID="2a9b50dd5e1daa512a86b757bddde61b1f2a2f487d622ef27cc897d475ae9c99"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.256476 4721 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6db5858b-86d7-4b98-8d7f-26317521edcd-cache\") on node \"crc\" DevicePath \"\""
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.256495 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxm4w\" (UniqueName: \"kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-kube-api-access-gxm4w\") on node \"crc\" DevicePath \"\""
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.256508 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6db5858b-86d7-4b98-8d7f-26317521edcd-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.256537 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" "
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.268463 4721 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.277781 4721 scope.go:117] "RemoveContainer" containerID="7652d5f506e92d8c2f2750ef7eb1d5859d3f2c5fead70a5df6bcbd55221c7dce"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.300923 4721 scope.go:117] "RemoveContainer" containerID="bef0a68830aadde51e8762ecc134301f5f369fa49cee7a58d3057dbb50d2774d"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.324725 4721 scope.go:117] "RemoveContainer" containerID="f83ef1e3f11eec89afa078c016ae6a62e6fff25cf605054ac2689db17732cf00"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.343742 4721 scope.go:117] "RemoveContainer" containerID="337a09f75ad47f99ff3cbbe394913ec228ec14745401e2d14a462b4ed81e8cfa"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.357441 4721 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\""
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.368099 4721 scope.go:117] "RemoveContainer" containerID="71d647bca013c32f1eaa51f96cd2319fdcb6c8a80ef7e365b131af59699fe5e1"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.393464 4721 scope.go:117] "RemoveContainer" containerID="3b1fe7983cbd31d453c7a672e66adcc08d3417de0998a8a9df68e3bafe3a87b2"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.414923 4721 scope.go:117] "RemoveContainer" containerID="e54cab2b7d3edd263c5595f6f3d7856b8ded9f45b924ea2258f2310f0bcb78e0"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.443318 4721 scope.go:117] "RemoveContainer" containerID="ceba2f7ea1ea7e91acbf1eb79a5785106a1bc050744f606a1d3b56029227918e"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.459035 4721 scope.go:117] "RemoveContainer" containerID="2cfec86320141b34671bd8e2d97ae669ef7a39cb1960c8cbd3aa4d055ec17502"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.477798 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.483195 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.492262 4721 scope.go:117] "RemoveContainer" containerID="5c2c257e405a3ee5ab9a42fba81bc67e11d37f0db4b2fde8c43de0c19923c5d7"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.512981 4721 scope.go:117] "RemoveContainer" containerID="82522c1257a78f6fc8e38dd7b0039f98daa00a591d6efed9d2a3dfcc48f5afe3"
Dec 02 12:15:12 crc kubenswrapper[4721]: E1202 12:15:12.513467 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82522c1257a78f6fc8e38dd7b0039f98daa00a591d6efed9d2a3dfcc48f5afe3\": container with ID starting with 82522c1257a78f6fc8e38dd7b0039f98daa00a591d6efed9d2a3dfcc48f5afe3 not found: ID does not exist" containerID="82522c1257a78f6fc8e38dd7b0039f98daa00a591d6efed9d2a3dfcc48f5afe3"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.513498 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82522c1257a78f6fc8e38dd7b0039f98daa00a591d6efed9d2a3dfcc48f5afe3"} err="failed to get container status \"82522c1257a78f6fc8e38dd7b0039f98daa00a591d6efed9d2a3dfcc48f5afe3\": rpc error: code = NotFound desc = could not find container \"82522c1257a78f6fc8e38dd7b0039f98daa00a591d6efed9d2a3dfcc48f5afe3\": container with ID starting with 82522c1257a78f6fc8e38dd7b0039f98daa00a591d6efed9d2a3dfcc48f5afe3 not found: ID does not exist"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.513526 4721 scope.go:117] "RemoveContainer" containerID="e86a828faebecaf964bbb3588ddfe48fe57179dc34045fc05f3374c0d59d525b"
Dec 02 12:15:12 crc kubenswrapper[4721]: E1202 12:15:12.513912 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e86a828faebecaf964bbb3588ddfe48fe57179dc34045fc05f3374c0d59d525b\": container with ID starting with e86a828faebecaf964bbb3588ddfe48fe57179dc34045fc05f3374c0d59d525b not found: ID does not exist" containerID="e86a828faebecaf964bbb3588ddfe48fe57179dc34045fc05f3374c0d59d525b"
Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.513935 4721 pod_container_deletor.go:53] "DeleteContainer returned error"
containerID={"Type":"cri-o","ID":"e86a828faebecaf964bbb3588ddfe48fe57179dc34045fc05f3374c0d59d525b"} err="failed to get container status \"e86a828faebecaf964bbb3588ddfe48fe57179dc34045fc05f3374c0d59d525b\": rpc error: code = NotFound desc = could not find container \"e86a828faebecaf964bbb3588ddfe48fe57179dc34045fc05f3374c0d59d525b\": container with ID starting with e86a828faebecaf964bbb3588ddfe48fe57179dc34045fc05f3374c0d59d525b not found: ID does not exist" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.513951 4721 scope.go:117] "RemoveContainer" containerID="2227e89dde2f1f721e42d5121d14a9612770eb66446ca5e73b9c0fe0de9ee21a" Dec 02 12:15:12 crc kubenswrapper[4721]: E1202 12:15:12.514194 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2227e89dde2f1f721e42d5121d14a9612770eb66446ca5e73b9c0fe0de9ee21a\": container with ID starting with 2227e89dde2f1f721e42d5121d14a9612770eb66446ca5e73b9c0fe0de9ee21a not found: ID does not exist" containerID="2227e89dde2f1f721e42d5121d14a9612770eb66446ca5e73b9c0fe0de9ee21a" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.514229 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2227e89dde2f1f721e42d5121d14a9612770eb66446ca5e73b9c0fe0de9ee21a"} err="failed to get container status \"2227e89dde2f1f721e42d5121d14a9612770eb66446ca5e73b9c0fe0de9ee21a\": rpc error: code = NotFound desc = could not find container \"2227e89dde2f1f721e42d5121d14a9612770eb66446ca5e73b9c0fe0de9ee21a\": container with ID starting with 2227e89dde2f1f721e42d5121d14a9612770eb66446ca5e73b9c0fe0de9ee21a not found: ID does not exist" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.514251 4721 scope.go:117] "RemoveContainer" containerID="ee71c6d4d189c7994febacce98108fbd3c4e28a74cace6a8a7385f4cc18b9579" Dec 02 12:15:12 crc kubenswrapper[4721]: E1202 12:15:12.514521 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee71c6d4d189c7994febacce98108fbd3c4e28a74cace6a8a7385f4cc18b9579\": container with ID starting with ee71c6d4d189c7994febacce98108fbd3c4e28a74cace6a8a7385f4cc18b9579 not found: ID does not exist" containerID="ee71c6d4d189c7994febacce98108fbd3c4e28a74cace6a8a7385f4cc18b9579" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.514550 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee71c6d4d189c7994febacce98108fbd3c4e28a74cace6a8a7385f4cc18b9579"} err="failed to get container status \"ee71c6d4d189c7994febacce98108fbd3c4e28a74cace6a8a7385f4cc18b9579\": rpc error: code = NotFound desc = could not find container \"ee71c6d4d189c7994febacce98108fbd3c4e28a74cace6a8a7385f4cc18b9579\": container with ID starting with ee71c6d4d189c7994febacce98108fbd3c4e28a74cace6a8a7385f4cc18b9579 not found: ID does not exist" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.514567 4721 scope.go:117] "RemoveContainer" containerID="2a9b50dd5e1daa512a86b757bddde61b1f2a2f487d622ef27cc897d475ae9c99" Dec 02 12:15:12 crc kubenswrapper[4721]: E1202 12:15:12.514792 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a9b50dd5e1daa512a86b757bddde61b1f2a2f487d622ef27cc897d475ae9c99\": container with ID starting with 2a9b50dd5e1daa512a86b757bddde61b1f2a2f487d622ef27cc897d475ae9c99 not found: ID does not exist" 
containerID="2a9b50dd5e1daa512a86b757bddde61b1f2a2f487d622ef27cc897d475ae9c99" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.514817 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a9b50dd5e1daa512a86b757bddde61b1f2a2f487d622ef27cc897d475ae9c99"} err="failed to get container status \"2a9b50dd5e1daa512a86b757bddde61b1f2a2f487d622ef27cc897d475ae9c99\": rpc error: code = NotFound desc = could not find container \"2a9b50dd5e1daa512a86b757bddde61b1f2a2f487d622ef27cc897d475ae9c99\": container with ID starting with 2a9b50dd5e1daa512a86b757bddde61b1f2a2f487d622ef27cc897d475ae9c99 not found: ID does not exist" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.514832 4721 scope.go:117] "RemoveContainer" containerID="7652d5f506e92d8c2f2750ef7eb1d5859d3f2c5fead70a5df6bcbd55221c7dce" Dec 02 12:15:12 crc kubenswrapper[4721]: E1202 12:15:12.515104 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7652d5f506e92d8c2f2750ef7eb1d5859d3f2c5fead70a5df6bcbd55221c7dce\": container with ID starting with 7652d5f506e92d8c2f2750ef7eb1d5859d3f2c5fead70a5df6bcbd55221c7dce not found: ID does not exist" containerID="7652d5f506e92d8c2f2750ef7eb1d5859d3f2c5fead70a5df6bcbd55221c7dce" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.515133 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7652d5f506e92d8c2f2750ef7eb1d5859d3f2c5fead70a5df6bcbd55221c7dce"} err="failed to get container status \"7652d5f506e92d8c2f2750ef7eb1d5859d3f2c5fead70a5df6bcbd55221c7dce\": rpc error: code = NotFound desc = could not find container \"7652d5f506e92d8c2f2750ef7eb1d5859d3f2c5fead70a5df6bcbd55221c7dce\": container with ID starting with 7652d5f506e92d8c2f2750ef7eb1d5859d3f2c5fead70a5df6bcbd55221c7dce not found: ID does not exist" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.515149 4721 scope.go:117] "RemoveContainer" containerID="bef0a68830aadde51e8762ecc134301f5f369fa49cee7a58d3057dbb50d2774d" Dec 02 12:15:12 crc kubenswrapper[4721]: E1202 12:15:12.515368 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bef0a68830aadde51e8762ecc134301f5f369fa49cee7a58d3057dbb50d2774d\": container with ID starting with bef0a68830aadde51e8762ecc134301f5f369fa49cee7a58d3057dbb50d2774d not found: ID does not exist" containerID="bef0a68830aadde51e8762ecc134301f5f369fa49cee7a58d3057dbb50d2774d" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.515406 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bef0a68830aadde51e8762ecc134301f5f369fa49cee7a58d3057dbb50d2774d"} err="failed to get container status \"bef0a68830aadde51e8762ecc134301f5f369fa49cee7a58d3057dbb50d2774d\": rpc error: code = NotFound desc = could not find container \"bef0a68830aadde51e8762ecc134301f5f369fa49cee7a58d3057dbb50d2774d\": container with ID starting with bef0a68830aadde51e8762ecc134301f5f369fa49cee7a58d3057dbb50d2774d not found: ID does not exist" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.515428 4721 scope.go:117] "RemoveContainer" containerID="f83ef1e3f11eec89afa078c016ae6a62e6fff25cf605054ac2689db17732cf00" Dec 02 12:15:12 crc kubenswrapper[4721]: E1202 12:15:12.515686 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"f83ef1e3f11eec89afa078c016ae6a62e6fff25cf605054ac2689db17732cf00\": container with ID starting with f83ef1e3f11eec89afa078c016ae6a62e6fff25cf605054ac2689db17732cf00 not found: ID does not exist" containerID="f83ef1e3f11eec89afa078c016ae6a62e6fff25cf605054ac2689db17732cf00" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.515717 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f83ef1e3f11eec89afa078c016ae6a62e6fff25cf605054ac2689db17732cf00"} err="failed to get container status \"f83ef1e3f11eec89afa078c016ae6a62e6fff25cf605054ac2689db17732cf00\": rpc error: code = NotFound desc = could not find container \"f83ef1e3f11eec89afa078c016ae6a62e6fff25cf605054ac2689db17732cf00\": container with ID starting with f83ef1e3f11eec89afa078c016ae6a62e6fff25cf605054ac2689db17732cf00 not found: ID does not exist" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.515739 4721 scope.go:117] "RemoveContainer" containerID="337a09f75ad47f99ff3cbbe394913ec228ec14745401e2d14a462b4ed81e8cfa" Dec 02 12:15:12 crc kubenswrapper[4721]: E1202 12:15:12.515957 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"337a09f75ad47f99ff3cbbe394913ec228ec14745401e2d14a462b4ed81e8cfa\": container with ID starting with 337a09f75ad47f99ff3cbbe394913ec228ec14745401e2d14a462b4ed81e8cfa not found: ID does not exist" containerID="337a09f75ad47f99ff3cbbe394913ec228ec14745401e2d14a462b4ed81e8cfa" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.515990 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"337a09f75ad47f99ff3cbbe394913ec228ec14745401e2d14a462b4ed81e8cfa"} err="failed to get container status \"337a09f75ad47f99ff3cbbe394913ec228ec14745401e2d14a462b4ed81e8cfa\": rpc error: code = NotFound desc = could not find container \"337a09f75ad47f99ff3cbbe394913ec228ec14745401e2d14a462b4ed81e8cfa\": container with ID starting with 337a09f75ad47f99ff3cbbe394913ec228ec14745401e2d14a462b4ed81e8cfa not found: ID does not exist" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.516013 4721 scope.go:117] "RemoveContainer" containerID="71d647bca013c32f1eaa51f96cd2319fdcb6c8a80ef7e365b131af59699fe5e1" Dec 02 12:15:12 crc kubenswrapper[4721]: E1202 12:15:12.516267 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71d647bca013c32f1eaa51f96cd2319fdcb6c8a80ef7e365b131af59699fe5e1\": container with ID starting with 71d647bca013c32f1eaa51f96cd2319fdcb6c8a80ef7e365b131af59699fe5e1 not found: ID does not exist" containerID="71d647bca013c32f1eaa51f96cd2319fdcb6c8a80ef7e365b131af59699fe5e1" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.516440 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71d647bca013c32f1eaa51f96cd2319fdcb6c8a80ef7e365b131af59699fe5e1"} err="failed to get container status \"71d647bca013c32f1eaa51f96cd2319fdcb6c8a80ef7e365b131af59699fe5e1\": rpc error: code = NotFound desc = could not find container \"71d647bca013c32f1eaa51f96cd2319fdcb6c8a80ef7e365b131af59699fe5e1\": container with ID starting with 71d647bca013c32f1eaa51f96cd2319fdcb6c8a80ef7e365b131af59699fe5e1 not found: ID does not exist" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.516460 4721 scope.go:117] "RemoveContainer" containerID="3b1fe7983cbd31d453c7a672e66adcc08d3417de0998a8a9df68e3bafe3a87b2" Dec 02 12:15:12 crc 
kubenswrapper[4721]: E1202 12:15:12.516677 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b1fe7983cbd31d453c7a672e66adcc08d3417de0998a8a9df68e3bafe3a87b2\": container with ID starting with 3b1fe7983cbd31d453c7a672e66adcc08d3417de0998a8a9df68e3bafe3a87b2 not found: ID does not exist" containerID="3b1fe7983cbd31d453c7a672e66adcc08d3417de0998a8a9df68e3bafe3a87b2" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.516701 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b1fe7983cbd31d453c7a672e66adcc08d3417de0998a8a9df68e3bafe3a87b2"} err="failed to get container status \"3b1fe7983cbd31d453c7a672e66adcc08d3417de0998a8a9df68e3bafe3a87b2\": rpc error: code = NotFound desc = could not find container \"3b1fe7983cbd31d453c7a672e66adcc08d3417de0998a8a9df68e3bafe3a87b2\": container with ID starting with 3b1fe7983cbd31d453c7a672e66adcc08d3417de0998a8a9df68e3bafe3a87b2 not found: ID does not exist" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.516718 4721 scope.go:117] "RemoveContainer" containerID="e54cab2b7d3edd263c5595f6f3d7856b8ded9f45b924ea2258f2310f0bcb78e0" Dec 02 12:15:12 crc kubenswrapper[4721]: E1202 12:15:12.516973 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e54cab2b7d3edd263c5595f6f3d7856b8ded9f45b924ea2258f2310f0bcb78e0\": container with ID starting with e54cab2b7d3edd263c5595f6f3d7856b8ded9f45b924ea2258f2310f0bcb78e0 not found: ID does not exist" containerID="e54cab2b7d3edd263c5595f6f3d7856b8ded9f45b924ea2258f2310f0bcb78e0" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.516998 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e54cab2b7d3edd263c5595f6f3d7856b8ded9f45b924ea2258f2310f0bcb78e0"} err="failed to get container status \"e54cab2b7d3edd263c5595f6f3d7856b8ded9f45b924ea2258f2310f0bcb78e0\": rpc error: code = NotFound desc = could not find container \"e54cab2b7d3edd263c5595f6f3d7856b8ded9f45b924ea2258f2310f0bcb78e0\": container with ID starting with e54cab2b7d3edd263c5595f6f3d7856b8ded9f45b924ea2258f2310f0bcb78e0 not found: ID does not exist" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.517015 4721 scope.go:117] "RemoveContainer" containerID="ceba2f7ea1ea7e91acbf1eb79a5785106a1bc050744f606a1d3b56029227918e" Dec 02 12:15:12 crc kubenswrapper[4721]: E1202 12:15:12.517223 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceba2f7ea1ea7e91acbf1eb79a5785106a1bc050744f606a1d3b56029227918e\": container with ID starting with ceba2f7ea1ea7e91acbf1eb79a5785106a1bc050744f606a1d3b56029227918e not found: ID does not exist" containerID="ceba2f7ea1ea7e91acbf1eb79a5785106a1bc050744f606a1d3b56029227918e" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.517249 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceba2f7ea1ea7e91acbf1eb79a5785106a1bc050744f606a1d3b56029227918e"} err="failed to get container status \"ceba2f7ea1ea7e91acbf1eb79a5785106a1bc050744f606a1d3b56029227918e\": rpc error: code = NotFound desc = could not find container \"ceba2f7ea1ea7e91acbf1eb79a5785106a1bc050744f606a1d3b56029227918e\": container with ID starting with ceba2f7ea1ea7e91acbf1eb79a5785106a1bc050744f606a1d3b56029227918e not found: ID does not exist" Dec 02 12:15:12 crc kubenswrapper[4721]: 
I1202 12:15:12.517265 4721 scope.go:117] "RemoveContainer" containerID="2cfec86320141b34671bd8e2d97ae669ef7a39cb1960c8cbd3aa4d055ec17502" Dec 02 12:15:12 crc kubenswrapper[4721]: E1202 12:15:12.517549 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2cfec86320141b34671bd8e2d97ae669ef7a39cb1960c8cbd3aa4d055ec17502\": container with ID starting with 2cfec86320141b34671bd8e2d97ae669ef7a39cb1960c8cbd3aa4d055ec17502 not found: ID does not exist" containerID="2cfec86320141b34671bd8e2d97ae669ef7a39cb1960c8cbd3aa4d055ec17502" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.517576 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2cfec86320141b34671bd8e2d97ae669ef7a39cb1960c8cbd3aa4d055ec17502"} err="failed to get container status \"2cfec86320141b34671bd8e2d97ae669ef7a39cb1960c8cbd3aa4d055ec17502\": rpc error: code = NotFound desc = could not find container \"2cfec86320141b34671bd8e2d97ae669ef7a39cb1960c8cbd3aa4d055ec17502\": container with ID starting with 2cfec86320141b34671bd8e2d97ae669ef7a39cb1960c8cbd3aa4d055ec17502 not found: ID does not exist" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.517595 4721 scope.go:117] "RemoveContainer" containerID="5c2c257e405a3ee5ab9a42fba81bc67e11d37f0db4b2fde8c43de0c19923c5d7" Dec 02 12:15:12 crc kubenswrapper[4721]: E1202 12:15:12.517864 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c2c257e405a3ee5ab9a42fba81bc67e11d37f0db4b2fde8c43de0c19923c5d7\": container with ID starting with 5c2c257e405a3ee5ab9a42fba81bc67e11d37f0db4b2fde8c43de0c19923c5d7 not found: ID does not exist" containerID="5c2c257e405a3ee5ab9a42fba81bc67e11d37f0db4b2fde8c43de0c19923c5d7" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.517887 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c2c257e405a3ee5ab9a42fba81bc67e11d37f0db4b2fde8c43de0c19923c5d7"} err="failed to get container status \"5c2c257e405a3ee5ab9a42fba81bc67e11d37f0db4b2fde8c43de0c19923c5d7\": rpc error: code = NotFound desc = could not find container \"5c2c257e405a3ee5ab9a42fba81bc67e11d37f0db4b2fde8c43de0c19923c5d7\": container with ID starting with 5c2c257e405a3ee5ab9a42fba81bc67e11d37f0db4b2fde8c43de0c19923c5d7 not found: ID does not exist" Dec 02 12:15:12 crc kubenswrapper[4721]: I1202 12:15:12.851065 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" path="/var/lib/kubelet/pods/6db5858b-86d7-4b98-8d7f-26317521edcd/volumes" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.641434 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642290 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-server" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642313 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-server" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642328 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-auditor" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642339 4721 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-auditor" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642391 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-server" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642404 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-server" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642418 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-expirer" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642449 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-expirer" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642465 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="rsync" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642477 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="rsync" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642511 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-server" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642522 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-server" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642537 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="swift-recon-cron" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642548 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="swift-recon-cron" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642566 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-auditor" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642578 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-auditor" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642596 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-replicator" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642607 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-replicator" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642629 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-auditor" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642640 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-auditor" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642656 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-updater" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642668 4721 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-updater" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642689 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-reaper" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642700 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-reaper" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642716 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-replicator" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642726 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-replicator" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642743 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-replicator" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642754 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-replicator" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642775 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41f40b3c-9bc1-40a1-8f9f-c234185f2d98" containerName="collect-profiles" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642786 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="41f40b3c-9bc1-40a1-8f9f-c234185f2d98" containerName="collect-profiles" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.642809 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-updater" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.642819 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-updater" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.643073 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-replicator" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.643095 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-auditor" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.643110 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-auditor" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.643128 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-expirer" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.643144 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-replicator" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.643155 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-reaper" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.643168 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-updater" Dec 02 12:15:14 crc 
kubenswrapper[4721]: I1202 12:15:14.643182 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="41f40b3c-9bc1-40a1-8f9f-c234185f2d98" containerName="collect-profiles" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.643192 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="swift-recon-cron" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.643211 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="rsync" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.643224 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-updater" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.643241 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="object-server" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.643255 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-auditor" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.643276 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-server" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.643290 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="account-replicator" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.643303 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db5858b-86d7-4b98-8d7f-26317521edcd" containerName="container-server" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.650285 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.654612 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-conf" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.655071 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-storage-config-data" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.656054 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-swift-dockercfg-9c56j" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.656669 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-files" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.659327 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.789152 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-cache\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.789232 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcl6c\" (UniqueName: \"kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-kube-api-access-vcl6c\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.789316 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.789370 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-lock\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.789396 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.890498 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcl6c\" (UniqueName: \"kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-kube-api-access-vcl6c\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.890629 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " 
pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.890678 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-lock\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.890703 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.890730 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-cache\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.891307 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-cache\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.891745 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.891783 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:15:14 crc kubenswrapper[4721]: E1202 12:15:14.892432 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift podName:bcc8bf37-541f-4278-9946-9f3bc6dd3f98 nodeName:}" failed. No retries permitted until 2025-12-02 12:15:15.392409815 +0000 UTC m=+1445.130960174 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift") pod "swift-storage-0" (UID: "bcc8bf37-541f-4278-9946-9f3bc6dd3f98") : configmap "swift-ring-files" not found Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.892328 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-lock\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.892248 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") device mount path \"/mnt/openstack/pv10\"" pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.907953 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcl6c\" (UniqueName: \"kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-kube-api-access-vcl6c\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:14 crc kubenswrapper[4721]: I1202 12:15:14.920937 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.008113 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-wmvcs"] Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.009115 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.010653 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"swift-proxy-config-data" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.011527 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-config-data" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.011676 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"combined-ca-bundle" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.011844 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"swift-kuttl-tests"/"swift-ring-scripts" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.031986 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-wmvcs"] Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.093818 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-swiftconf\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.093877 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d20da2b0-30a8-4ab9-a299-2328cb57d79b-ring-data-devices\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.093899 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d20da2b0-30a8-4ab9-a299-2328cb57d79b-scripts\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.094005 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-combined-ca-bundle\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.094124 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqvp9\" (UniqueName: \"kubernetes.io/projected/d20da2b0-30a8-4ab9-a299-2328cb57d79b-kube-api-access-dqvp9\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.094177 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d20da2b0-30a8-4ab9-a299-2328cb57d79b-etc-swift\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.094224 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-dispersionconf\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.195578 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-swiftconf\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.195996 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d20da2b0-30a8-4ab9-a299-2328cb57d79b-ring-data-devices\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.196967 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d20da2b0-30a8-4ab9-a299-2328cb57d79b-scripts\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.197754 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-combined-ca-bundle\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.198321 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqvp9\" (UniqueName: \"kubernetes.io/projected/d20da2b0-30a8-4ab9-a299-2328cb57d79b-kube-api-access-dqvp9\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.198910 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d20da2b0-30a8-4ab9-a299-2328cb57d79b-etc-swift\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.199057 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-dispersionconf\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.196916 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d20da2b0-30a8-4ab9-a299-2328cb57d79b-ring-data-devices\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.199268 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/empty-dir/d20da2b0-30a8-4ab9-a299-2328cb57d79b-etc-swift\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.197697 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d20da2b0-30a8-4ab9-a299-2328cb57d79b-scripts\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.199668 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-swiftconf\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.200118 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-combined-ca-bundle\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.208027 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-dispersionconf\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.226391 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqvp9\" (UniqueName: \"kubernetes.io/projected/d20da2b0-30a8-4ab9-a299-2328cb57d79b-kube-api-access-dqvp9\") pod \"swift-ring-rebalance-wmvcs\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.288834 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq"] Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.290451 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.292566 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"cert-swift-internal-svc" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.292723 4721 reflector.go:368] Caches populated for *v1.Secret from object-"swift-kuttl-tests"/"cert-swift-public-svc" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.304060 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq"] Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.331973 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.401242 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-config-data\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.401311 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzg6s\" (UniqueName: \"kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-kube-api-access-pzg6s\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.401379 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-combined-ca-bundle\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.401406 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bb4ffe44-922f-47be-8fb4-ff6646c76095-log-httpd\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.401440 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.401484 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.401526 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-public-tls-certs\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.401550 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-internal-tls-certs\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.401596 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/bb4ffe44-922f-47be-8fb4-ff6646c76095-run-httpd\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: E1202 12:15:15.401777 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:15:15 crc kubenswrapper[4721]: E1202 12:15:15.401793 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:15:15 crc kubenswrapper[4721]: E1202 12:15:15.401835 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift podName:bcc8bf37-541f-4278-9946-9f3bc6dd3f98 nodeName:}" failed. No retries permitted until 2025-12-02 12:15:16.401820161 +0000 UTC m=+1446.140370510 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift") pod "swift-storage-0" (UID: "bcc8bf37-541f-4278-9946-9f3bc6dd3f98") : configmap "swift-ring-files" not found Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.502436 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bb4ffe44-922f-47be-8fb4-ff6646c76095-run-httpd\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.502484 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-config-data\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.502525 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzg6s\" (UniqueName: \"kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-kube-api-access-pzg6s\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.502558 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-combined-ca-bundle\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.502575 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bb4ffe44-922f-47be-8fb4-ff6646c76095-log-httpd\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.502609 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " 
pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.502656 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-public-tls-certs\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.502673 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-internal-tls-certs\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: E1202 12:15:15.502869 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:15:15 crc kubenswrapper[4721]: E1202 12:15:15.502901 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq: configmap "swift-ring-files" not found Dec 02 12:15:15 crc kubenswrapper[4721]: E1202 12:15:15.503044 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift podName:bb4ffe44-922f-47be-8fb4-ff6646c76095 nodeName:}" failed. No retries permitted until 2025-12-02 12:15:16.002956947 +0000 UTC m=+1445.741507306 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift") pod "swift-proxy-57f4bdfd6b-w5xrq" (UID: "bb4ffe44-922f-47be-8fb4-ff6646c76095") : configmap "swift-ring-files" not found Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.503054 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bb4ffe44-922f-47be-8fb4-ff6646c76095-run-httpd\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.503282 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bb4ffe44-922f-47be-8fb4-ff6646c76095-log-httpd\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.506775 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-internal-tls-certs\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.506901 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-public-tls-certs\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.507183 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-combined-ca-bundle\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.507897 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-config-data\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.525370 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzg6s\" (UniqueName: \"kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-kube-api-access-pzg6s\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:15 crc kubenswrapper[4721]: I1202 12:15:15.745317 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-wmvcs"] Dec 02 12:15:15 crc kubenswrapper[4721]: W1202 12:15:15.748218 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd20da2b0_30a8_4ab9_a299_2328cb57d79b.slice/crio-49d21ccc1181c2cfa3546a128de8e1cec8b41a60e49b9e96909f084b76a4ffa4 WatchSource:0}: Error finding container 49d21ccc1181c2cfa3546a128de8e1cec8b41a60e49b9e96909f084b76a4ffa4: Status 404 returned error can't find the container with id 49d21ccc1181c2cfa3546a128de8e1cec8b41a60e49b9e96909f084b76a4ffa4 Dec 02 12:15:16 crc kubenswrapper[4721]: I1202 12:15:16.009695 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:16 crc kubenswrapper[4721]: E1202 12:15:16.009894 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:15:16 crc kubenswrapper[4721]: E1202 12:15:16.009923 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq: configmap "swift-ring-files" not found Dec 02 12:15:16 crc kubenswrapper[4721]: E1202 12:15:16.009988 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift podName:bb4ffe44-922f-47be-8fb4-ff6646c76095 nodeName:}" failed. No retries permitted until 2025-12-02 12:15:17.009968059 +0000 UTC m=+1446.748518408 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift") pod "swift-proxy-57f4bdfd6b-w5xrq" (UID: "bb4ffe44-922f-47be-8fb4-ff6646c76095") : configmap "swift-ring-files" not found Dec 02 12:15:16 crc kubenswrapper[4721]: I1202 12:15:16.165575 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" event={"ID":"d20da2b0-30a8-4ab9-a299-2328cb57d79b","Type":"ContainerStarted","Data":"1cd990a7ed91654d8b437e55c168088d85662fcde7818a64ee0d169f100c70f1"} Dec 02 12:15:16 crc kubenswrapper[4721]: I1202 12:15:16.165844 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" event={"ID":"d20da2b0-30a8-4ab9-a299-2328cb57d79b","Type":"ContainerStarted","Data":"49d21ccc1181c2cfa3546a128de8e1cec8b41a60e49b9e96909f084b76a4ffa4"} Dec 02 12:15:16 crc kubenswrapper[4721]: I1202 12:15:16.179844 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" podStartSLOduration=2.179827423 podStartE2EDuration="2.179827423s" podCreationTimestamp="2025-12-02 12:15:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:15:16.178256594 +0000 UTC m=+1445.916806943" watchObservedRunningTime="2025-12-02 12:15:16.179827423 +0000 UTC m=+1445.918377772" Dec 02 12:15:16 crc kubenswrapper[4721]: I1202 12:15:16.415233 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:16 crc kubenswrapper[4721]: E1202 12:15:16.415441 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:15:16 crc kubenswrapper[4721]: E1202 12:15:16.415458 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:15:16 crc kubenswrapper[4721]: E1202 12:15:16.415522 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift podName:bcc8bf37-541f-4278-9946-9f3bc6dd3f98 nodeName:}" failed. No retries permitted until 2025-12-02 12:15:18.415503714 +0000 UTC m=+1448.154054073 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift") pod "swift-storage-0" (UID: "bcc8bf37-541f-4278-9946-9f3bc6dd3f98") : configmap "swift-ring-files" not found Dec 02 12:15:17 crc kubenswrapper[4721]: I1202 12:15:17.023778 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:17 crc kubenswrapper[4721]: E1202 12:15:17.023956 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:15:17 crc kubenswrapper[4721]: E1202 12:15:17.024003 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq: configmap "swift-ring-files" not found Dec 02 12:15:17 crc kubenswrapper[4721]: E1202 12:15:17.024106 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift podName:bb4ffe44-922f-47be-8fb4-ff6646c76095 nodeName:}" failed. No retries permitted until 2025-12-02 12:15:19.024070245 +0000 UTC m=+1448.762620674 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift") pod "swift-proxy-57f4bdfd6b-w5xrq" (UID: "bb4ffe44-922f-47be-8fb4-ff6646c76095") : configmap "swift-ring-files" not found Dec 02 12:15:18 crc kubenswrapper[4721]: I1202 12:15:18.448495 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:18 crc kubenswrapper[4721]: E1202 12:15:18.448717 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:15:18 crc kubenswrapper[4721]: E1202 12:15:18.448829 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Dec 02 12:15:18 crc kubenswrapper[4721]: E1202 12:15:18.448880 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift podName:bcc8bf37-541f-4278-9946-9f3bc6dd3f98 nodeName:}" failed. No retries permitted until 2025-12-02 12:15:22.448864605 +0000 UTC m=+1452.187414954 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift") pod "swift-storage-0" (UID: "bcc8bf37-541f-4278-9946-9f3bc6dd3f98") : configmap "swift-ring-files" not found Dec 02 12:15:19 crc kubenswrapper[4721]: I1202 12:15:19.060681 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:19 crc kubenswrapper[4721]: E1202 12:15:19.060885 4721 projected.go:288] Couldn't get configMap swift-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Dec 02 12:15:19 crc kubenswrapper[4721]: E1202 12:15:19.060915 4721 projected.go:194] Error preparing data for projected volume etc-swift for pod swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq: configmap "swift-ring-files" not found Dec 02 12:15:19 crc kubenswrapper[4721]: E1202 12:15:19.060986 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift podName:bb4ffe44-922f-47be-8fb4-ff6646c76095 nodeName:}" failed. No retries permitted until 2025-12-02 12:15:23.060965764 +0000 UTC m=+1452.799516123 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift") pod "swift-proxy-57f4bdfd6b-w5xrq" (UID: "bb4ffe44-922f-47be-8fb4-ff6646c76095") : configmap "swift-ring-files" not found Dec 02 12:15:22 crc kubenswrapper[4721]: I1202 12:15:22.513898 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:22 crc kubenswrapper[4721]: I1202 12:15:22.523763 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift\") pod \"swift-storage-0\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") " pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:22 crc kubenswrapper[4721]: I1202 12:15:22.778063 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-storage-0" Dec 02 12:15:23 crc kubenswrapper[4721]: I1202 12:15:23.091091 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:15:23 crc kubenswrapper[4721]: I1202 12:15:23.134762 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:23 crc kubenswrapper[4721]: I1202 12:15:23.142102 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift\") pod \"swift-proxy-57f4bdfd6b-w5xrq\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:23 crc kubenswrapper[4721]: I1202 12:15:23.237162 4721 generic.go:334] "Generic (PLEG): container finished" podID="d20da2b0-30a8-4ab9-a299-2328cb57d79b" containerID="1cd990a7ed91654d8b437e55c168088d85662fcde7818a64ee0d169f100c70f1" exitCode=0 Dec 02 12:15:23 crc kubenswrapper[4721]: I1202 12:15:23.237327 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" event={"ID":"d20da2b0-30a8-4ab9-a299-2328cb57d79b","Type":"ContainerDied","Data":"1cd990a7ed91654d8b437e55c168088d85662fcde7818a64ee0d169f100c70f1"} Dec 02 12:15:23 crc kubenswrapper[4721]: I1202 12:15:23.241743 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"6ea1e5d1df8948e1c6bd4c926443b7c85b73d8a41991bcddbe0605ae1f7f123e"} Dec 02 12:15:23 crc kubenswrapper[4721]: I1202 12:15:23.407750 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:23 crc kubenswrapper[4721]: I1202 12:15:23.876096 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq"] Dec 02 12:15:23 crc kubenswrapper[4721]: W1202 12:15:23.878839 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb4ffe44_922f_47be_8fb4_ff6646c76095.slice/crio-691d3fc4d1c423a06f0605aa6d7b4829656e71f1c50ff0eb9003d4ef36ccc35b WatchSource:0}: Error finding container 691d3fc4d1c423a06f0605aa6d7b4829656e71f1c50ff0eb9003d4ef36ccc35b: Status 404 returned error can't find the container with id 691d3fc4d1c423a06f0605aa6d7b4829656e71f1c50ff0eb9003d4ef36ccc35b Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.253427 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"7535c7c14e13a49abeb74a94a8bd1a4ec491f65e82bb7e153101b798d0a62735"} Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.253819 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"85da596fb20638d2ee6c8e31bfab2b474609c2ca15b80b2b46485d52cb001e6c"} Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.253837 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"dfce69c9fc79882225cfd59549e428a1811ccb52cd0bb19488c3aac8e332d397"} Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.253852 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"a9b80e665586cd536e90d779913456d40abc1cf2319134a34a1e1f9b569d429a"} Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.253864 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"e87d10dd8b8dab75bb555cf482e6d111f41d0d24dd12714cf5a7e0fb9b6c68dd"} Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.253876 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"abd8319ab6735f6168875b0ddbc9759606251e471d269d41e0d09b48aeec277c"} Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.253886 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"49e1c87f8c375ff1765fe5bf980c00fc5eab00338c630665aa7cda483ae9774f"} Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.256061 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" event={"ID":"bb4ffe44-922f-47be-8fb4-ff6646c76095","Type":"ContainerStarted","Data":"04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6"} Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.256100 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" 
event={"ID":"bb4ffe44-922f-47be-8fb4-ff6646c76095","Type":"ContainerStarted","Data":"5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b"} Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.256120 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" event={"ID":"bb4ffe44-922f-47be-8fb4-ff6646c76095","Type":"ContainerStarted","Data":"691d3fc4d1c423a06f0605aa6d7b4829656e71f1c50ff0eb9003d4ef36ccc35b"} Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.256329 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.291941 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" podStartSLOduration=9.291921787 podStartE2EDuration="9.291921787s" podCreationTimestamp="2025-12-02 12:15:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:15:24.288330936 +0000 UTC m=+1454.026881285" watchObservedRunningTime="2025-12-02 12:15:24.291921787 +0000 UTC m=+1454.030472136" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.708332 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.863541 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d20da2b0-30a8-4ab9-a299-2328cb57d79b-scripts\") pod \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.863580 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-swiftconf\") pod \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.863659 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqvp9\" (UniqueName: \"kubernetes.io/projected/d20da2b0-30a8-4ab9-a299-2328cb57d79b-kube-api-access-dqvp9\") pod \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.863683 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-dispersionconf\") pod \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.863737 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-combined-ca-bundle\") pod \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.863811 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d20da2b0-30a8-4ab9-a299-2328cb57d79b-etc-swift\") pod \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " Dec 02 12:15:24 
crc kubenswrapper[4721]: I1202 12:15:24.863835 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d20da2b0-30a8-4ab9-a299-2328cb57d79b-ring-data-devices\") pod \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\" (UID: \"d20da2b0-30a8-4ab9-a299-2328cb57d79b\") " Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.864832 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d20da2b0-30a8-4ab9-a299-2328cb57d79b-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "d20da2b0-30a8-4ab9-a299-2328cb57d79b" (UID: "d20da2b0-30a8-4ab9-a299-2328cb57d79b"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.865824 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d20da2b0-30a8-4ab9-a299-2328cb57d79b-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "d20da2b0-30a8-4ab9-a299-2328cb57d79b" (UID: "d20da2b0-30a8-4ab9-a299-2328cb57d79b"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.870632 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d20da2b0-30a8-4ab9-a299-2328cb57d79b-kube-api-access-dqvp9" (OuterVolumeSpecName: "kube-api-access-dqvp9") pod "d20da2b0-30a8-4ab9-a299-2328cb57d79b" (UID: "d20da2b0-30a8-4ab9-a299-2328cb57d79b"). InnerVolumeSpecName "kube-api-access-dqvp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.886958 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d20da2b0-30a8-4ab9-a299-2328cb57d79b-scripts" (OuterVolumeSpecName: "scripts") pod "d20da2b0-30a8-4ab9-a299-2328cb57d79b" (UID: "d20da2b0-30a8-4ab9-a299-2328cb57d79b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.888395 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d20da2b0-30a8-4ab9-a299-2328cb57d79b" (UID: "d20da2b0-30a8-4ab9-a299-2328cb57d79b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.888442 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "d20da2b0-30a8-4ab9-a299-2328cb57d79b" (UID: "d20da2b0-30a8-4ab9-a299-2328cb57d79b"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.906776 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "d20da2b0-30a8-4ab9-a299-2328cb57d79b" (UID: "d20da2b0-30a8-4ab9-a299-2328cb57d79b"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.964897 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.964938 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/d20da2b0-30a8-4ab9-a299-2328cb57d79b-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.964955 4721 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/d20da2b0-30a8-4ab9-a299-2328cb57d79b-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.964964 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d20da2b0-30a8-4ab9-a299-2328cb57d79b-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.964974 4721 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.964984 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqvp9\" (UniqueName: \"kubernetes.io/projected/d20da2b0-30a8-4ab9-a299-2328cb57d79b-kube-api-access-dqvp9\") on node \"crc\" DevicePath \"\"" Dec 02 12:15:24 crc kubenswrapper[4721]: I1202 12:15:24.964994 4721 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/d20da2b0-30a8-4ab9-a299-2328cb57d79b-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 12:15:25 crc kubenswrapper[4721]: I1202 12:15:25.263500 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" event={"ID":"d20da2b0-30a8-4ab9-a299-2328cb57d79b","Type":"ContainerDied","Data":"49d21ccc1181c2cfa3546a128de8e1cec8b41a60e49b9e96909f084b76a4ffa4"} Dec 02 12:15:25 crc kubenswrapper[4721]: I1202 12:15:25.263776 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49d21ccc1181c2cfa3546a128de8e1cec8b41a60e49b9e96909f084b76a4ffa4" Dec 02 12:15:25 crc kubenswrapper[4721]: I1202 12:15:25.263832 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-ring-rebalance-wmvcs" Dec 02 12:15:25 crc kubenswrapper[4721]: I1202 12:15:25.271012 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"f817161f5bf9a3220d10cddfe03bbcd6e5d555181aaef609ae3adeaef6e3c32b"} Dec 02 12:15:25 crc kubenswrapper[4721]: I1202 12:15:25.271065 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:25 crc kubenswrapper[4721]: I1202 12:15:25.271078 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"14ecca04ef80f5d8e4a2b80d00d273cc59bc814f0482fbf9808823d48e7ae959"} Dec 02 12:15:25 crc kubenswrapper[4721]: I1202 12:15:25.271087 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"d2e4f2bca78e144d7fa5d6d767cabbd15693828067c39546f5bae979f0418d17"} Dec 02 12:15:25 crc kubenswrapper[4721]: I1202 12:15:25.271098 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"ccdaba9ae4a5c692130e0b1de87c1d710fc7ff0c2ba046a205de73912232fc68"} Dec 02 12:15:25 crc kubenswrapper[4721]: I1202 12:15:25.271107 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"2fdda2495d6640d2b4b1dfdce1d73bbb926c1de60d3c954ca9973fe6915db465"} Dec 02 12:15:25 crc kubenswrapper[4721]: I1202 12:15:25.271118 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"686e55e98a419eebb1f93b8885821ca8114186d02cdfadea0ade536a31fbff72"} Dec 02 12:15:25 crc kubenswrapper[4721]: I1202 12:15:25.703421 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 12:15:25 crc kubenswrapper[4721]: I1202 12:15:25.703485 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 12:15:25 crc kubenswrapper[4721]: I1202 12:15:25.703539 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 12:15:25 crc kubenswrapper[4721]: I1202 12:15:25.704192 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e66eb73009f2b3bd9c935e4952f8884d1696ff381f4f38bc986a2a57bb5b7ce6"} pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 12:15:25 crc kubenswrapper[4721]: 
I1202 12:15:25.704250 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" containerID="cri-o://e66eb73009f2b3bd9c935e4952f8884d1696ff381f4f38bc986a2a57bb5b7ce6" gracePeriod=600 Dec 02 12:15:26 crc kubenswrapper[4721]: I1202 12:15:26.284202 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"ed08fad96603a908329a4639c3da910532379455b2deed38d8c2bbb04078197e"} Dec 02 12:15:26 crc kubenswrapper[4721]: I1202 12:15:26.284508 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerStarted","Data":"d2623a652d84de904df19c60b58e60c9b42cddeb500d10591060dd57356f2193"} Dec 02 12:15:26 crc kubenswrapper[4721]: I1202 12:15:26.287265 4721 generic.go:334] "Generic (PLEG): container finished" podID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerID="e66eb73009f2b3bd9c935e4952f8884d1696ff381f4f38bc986a2a57bb5b7ce6" exitCode=0 Dec 02 12:15:26 crc kubenswrapper[4721]: I1202 12:15:26.287301 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerDied","Data":"e66eb73009f2b3bd9c935e4952f8884d1696ff381f4f38bc986a2a57bb5b7ce6"} Dec 02 12:15:26 crc kubenswrapper[4721]: I1202 12:15:26.287363 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerStarted","Data":"d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0"} Dec 02 12:15:26 crc kubenswrapper[4721]: I1202 12:15:26.287384 4721 scope.go:117] "RemoveContainer" containerID="53f7071ac35634b832b9710cc1dcc9432696ff76188da7c6520c40b950115114" Dec 02 12:15:26 crc kubenswrapper[4721]: I1202 12:15:26.317741 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/swift-storage-0" podStartSLOduration=13.317725204 podStartE2EDuration="13.317725204s" podCreationTimestamp="2025-12-02 12:15:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:15:26.316201407 +0000 UTC m=+1456.054751766" watchObservedRunningTime="2025-12-02 12:15:26.317725204 +0000 UTC m=+1456.056275553" Dec 02 12:15:33 crc kubenswrapper[4721]: I1202 12:15:33.413770 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:33 crc kubenswrapper[4721]: I1202 12:15:33.415930 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.388319 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"] Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.391277 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-server" containerID="cri-o://49e1c87f8c375ff1765fe5bf980c00fc5eab00338c630665aa7cda483ae9774f" gracePeriod=30 Dec 02 
12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.391336 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-expirer" containerID="cri-o://f817161f5bf9a3220d10cddfe03bbcd6e5d555181aaef609ae3adeaef6e3c32b" gracePeriod=30 Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.391373 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-replicator" containerID="cri-o://ccdaba9ae4a5c692130e0b1de87c1d710fc7ff0c2ba046a205de73912232fc68" gracePeriod=30 Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.391392 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-updater" containerID="cri-o://14ecca04ef80f5d8e4a2b80d00d273cc59bc814f0482fbf9808823d48e7ae959" gracePeriod=30 Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.391447 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-reaper" containerID="cri-o://a9b80e665586cd536e90d779913456d40abc1cf2319134a34a1e1f9b569d429a" gracePeriod=30 Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.391375 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-auditor" containerID="cri-o://d2e4f2bca78e144d7fa5d6d767cabbd15693828067c39546f5bae979f0418d17" gracePeriod=30 Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.391477 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="swift-recon-cron" containerID="cri-o://ed08fad96603a908329a4639c3da910532379455b2deed38d8c2bbb04078197e" gracePeriod=30 Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.391336 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-server" containerID="cri-o://2fdda2495d6640d2b4b1dfdce1d73bbb926c1de60d3c954ca9973fe6915db465" gracePeriod=30 Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.391497 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-auditor" containerID="cri-o://e87d10dd8b8dab75bb555cf482e6d111f41d0d24dd12714cf5a7e0fb9b6c68dd" gracePeriod=30 Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.391493 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-server" containerID="cri-o://dfce69c9fc79882225cfd59549e428a1811ccb52cd0bb19488c3aac8e332d397" gracePeriod=30 Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.391563 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="rsync" containerID="cri-o://d2623a652d84de904df19c60b58e60c9b42cddeb500d10591060dd57356f2193" 
gracePeriod=30 Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.391527 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-updater" containerID="cri-o://686e55e98a419eebb1f93b8885821ca8114186d02cdfadea0ade536a31fbff72" gracePeriod=30 Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.391579 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-replicator" containerID="cri-o://abd8319ab6735f6168875b0ddbc9759606251e471d269d41e0d09b48aeec277c" gracePeriod=30 Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.391635 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-replicator" containerID="cri-o://85da596fb20638d2ee6c8e31bfab2b474609c2ca15b80b2b46485d52cb001e6c" gracePeriod=30 Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.391517 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-storage-0" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-auditor" containerID="cri-o://7535c7c14e13a49abeb74a94a8bd1a4ec491f65e82bb7e153101b798d0a62735" gracePeriod=30 Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.396445 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-wmvcs"] Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.415407 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-ring-rebalance-wmvcs"] Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.449226 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq"] Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.449719 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" podUID="bb4ffe44-922f-47be-8fb4-ff6646c76095" containerName="proxy-httpd" containerID="cri-o://5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b" gracePeriod=30 Dec 02 12:15:35 crc kubenswrapper[4721]: I1202 12:15:35.450236 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" podUID="bb4ffe44-922f-47be-8fb4-ff6646c76095" containerName="proxy-server" containerID="cri-o://04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6" gracePeriod=30 Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.116094 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.226564 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bb4ffe44-922f-47be-8fb4-ff6646c76095-log-httpd\") pod \"bb4ffe44-922f-47be-8fb4-ff6646c76095\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.226611 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-combined-ca-bundle\") pod \"bb4ffe44-922f-47be-8fb4-ff6646c76095\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.226664 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-public-tls-certs\") pod \"bb4ffe44-922f-47be-8fb4-ff6646c76095\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.226739 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzg6s\" (UniqueName: \"kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-kube-api-access-pzg6s\") pod \"bb4ffe44-922f-47be-8fb4-ff6646c76095\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.226759 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift\") pod \"bb4ffe44-922f-47be-8fb4-ff6646c76095\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.226776 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-config-data\") pod \"bb4ffe44-922f-47be-8fb4-ff6646c76095\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.226802 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-internal-tls-certs\") pod \"bb4ffe44-922f-47be-8fb4-ff6646c76095\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.226826 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bb4ffe44-922f-47be-8fb4-ff6646c76095-run-httpd\") pod \"bb4ffe44-922f-47be-8fb4-ff6646c76095\" (UID: \"bb4ffe44-922f-47be-8fb4-ff6646c76095\") " Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.228193 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb4ffe44-922f-47be-8fb4-ff6646c76095-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "bb4ffe44-922f-47be-8fb4-ff6646c76095" (UID: "bb4ffe44-922f-47be-8fb4-ff6646c76095"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.228442 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb4ffe44-922f-47be-8fb4-ff6646c76095-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "bb4ffe44-922f-47be-8fb4-ff6646c76095" (UID: "bb4ffe44-922f-47be-8fb4-ff6646c76095"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.234740 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-kube-api-access-pzg6s" (OuterVolumeSpecName: "kube-api-access-pzg6s") pod "bb4ffe44-922f-47be-8fb4-ff6646c76095" (UID: "bb4ffe44-922f-47be-8fb4-ff6646c76095"). InnerVolumeSpecName "kube-api-access-pzg6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.234880 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "bb4ffe44-922f-47be-8fb4-ff6646c76095" (UID: "bb4ffe44-922f-47be-8fb4-ff6646c76095"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.267751 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bb4ffe44-922f-47be-8fb4-ff6646c76095" (UID: "bb4ffe44-922f-47be-8fb4-ff6646c76095"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.269704 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-config-data" (OuterVolumeSpecName: "config-data") pod "bb4ffe44-922f-47be-8fb4-ff6646c76095" (UID: "bb4ffe44-922f-47be-8fb4-ff6646c76095"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.276581 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "bb4ffe44-922f-47be-8fb4-ff6646c76095" (UID: "bb4ffe44-922f-47be-8fb4-ff6646c76095"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.283841 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "bb4ffe44-922f-47be-8fb4-ff6646c76095" (UID: "bb4ffe44-922f-47be-8fb4-ff6646c76095"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.328878 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.328916 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.328929 4721 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.328943 4721 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bb4ffe44-922f-47be-8fb4-ff6646c76095-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.328955 4721 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bb4ffe44-922f-47be-8fb4-ff6646c76095-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.328967 4721 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.328978 4721 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb4ffe44-922f-47be-8fb4-ff6646c76095-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.328995 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzg6s\" (UniqueName: \"kubernetes.io/projected/bb4ffe44-922f-47be-8fb4-ff6646c76095-kube-api-access-pzg6s\") on node \"crc\" DevicePath \"\"" Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.373819 4721 generic.go:334] "Generic (PLEG): container finished" podID="bb4ffe44-922f-47be-8fb4-ff6646c76095" containerID="04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6" exitCode=0 Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.373865 4721 generic.go:334] "Generic (PLEG): container finished" podID="bb4ffe44-922f-47be-8fb4-ff6646c76095" containerID="5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b" exitCode=0 Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.373934 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq"
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.374278 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" event={"ID":"bb4ffe44-922f-47be-8fb4-ff6646c76095","Type":"ContainerDied","Data":"04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.374437 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" event={"ID":"bb4ffe44-922f-47be-8fb4-ff6646c76095","Type":"ContainerDied","Data":"5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.374532 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq" event={"ID":"bb4ffe44-922f-47be-8fb4-ff6646c76095","Type":"ContainerDied","Data":"691d3fc4d1c423a06f0605aa6d7b4829656e71f1c50ff0eb9003d4ef36ccc35b"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.374471 4721 scope.go:117] "RemoveContainer" containerID="04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6"
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.382892 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerID="d2623a652d84de904df19c60b58e60c9b42cddeb500d10591060dd57356f2193" exitCode=0
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383082 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerID="f817161f5bf9a3220d10cddfe03bbcd6e5d555181aaef609ae3adeaef6e3c32b" exitCode=0
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.382971 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"d2623a652d84de904df19c60b58e60c9b42cddeb500d10591060dd57356f2193"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383182 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"f817161f5bf9a3220d10cddfe03bbcd6e5d555181aaef609ae3adeaef6e3c32b"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383202 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"14ecca04ef80f5d8e4a2b80d00d273cc59bc814f0482fbf9808823d48e7ae959"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383147 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerID="14ecca04ef80f5d8e4a2b80d00d273cc59bc814f0482fbf9808823d48e7ae959" exitCode=0
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383226 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerID="d2e4f2bca78e144d7fa5d6d767cabbd15693828067c39546f5bae979f0418d17" exitCode=0
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383239 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerID="ccdaba9ae4a5c692130e0b1de87c1d710fc7ff0c2ba046a205de73912232fc68" exitCode=0
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383249 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerID="2fdda2495d6640d2b4b1dfdce1d73bbb926c1de60d3c954ca9973fe6915db465" exitCode=0
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383259 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerID="686e55e98a419eebb1f93b8885821ca8114186d02cdfadea0ade536a31fbff72" exitCode=0
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383268 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerID="7535c7c14e13a49abeb74a94a8bd1a4ec491f65e82bb7e153101b798d0a62735" exitCode=0
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383277 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerID="85da596fb20638d2ee6c8e31bfab2b474609c2ca15b80b2b46485d52cb001e6c" exitCode=0
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383266 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"d2e4f2bca78e144d7fa5d6d767cabbd15693828067c39546f5bae979f0418d17"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383333 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"ccdaba9ae4a5c692130e0b1de87c1d710fc7ff0c2ba046a205de73912232fc68"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383359 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"2fdda2495d6640d2b4b1dfdce1d73bbb926c1de60d3c954ca9973fe6915db465"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383369 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"686e55e98a419eebb1f93b8885821ca8114186d02cdfadea0ade536a31fbff72"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383379 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"7535c7c14e13a49abeb74a94a8bd1a4ec491f65e82bb7e153101b798d0a62735"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383390 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"85da596fb20638d2ee6c8e31bfab2b474609c2ca15b80b2b46485d52cb001e6c"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383400 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"dfce69c9fc79882225cfd59549e428a1811ccb52cd0bb19488c3aac8e332d397"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383287 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerID="dfce69c9fc79882225cfd59549e428a1811ccb52cd0bb19488c3aac8e332d397" exitCode=0
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383423 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerID="a9b80e665586cd536e90d779913456d40abc1cf2319134a34a1e1f9b569d429a" exitCode=0
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383436 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerID="e87d10dd8b8dab75bb555cf482e6d111f41d0d24dd12714cf5a7e0fb9b6c68dd" exitCode=0
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383447 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerID="abd8319ab6735f6168875b0ddbc9759606251e471d269d41e0d09b48aeec277c" exitCode=0
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383455 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerID="49e1c87f8c375ff1765fe5bf980c00fc5eab00338c630665aa7cda483ae9774f" exitCode=0
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383472 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"a9b80e665586cd536e90d779913456d40abc1cf2319134a34a1e1f9b569d429a"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383486 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"e87d10dd8b8dab75bb555cf482e6d111f41d0d24dd12714cf5a7e0fb9b6c68dd"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383495 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"abd8319ab6735f6168875b0ddbc9759606251e471d269d41e0d09b48aeec277c"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.383507 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"49e1c87f8c375ff1765fe5bf980c00fc5eab00338c630665aa7cda483ae9774f"}
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.397001 4721 scope.go:117] "RemoveContainer" containerID="5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b"
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.413624 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq"]
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.420532 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-proxy-57f4bdfd6b-w5xrq"]
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.421015 4721 scope.go:117] "RemoveContainer" containerID="04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6"
Dec 02 12:15:36 crc kubenswrapper[4721]: E1202 12:15:36.421650 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6\": container with ID starting with 04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6 not found: ID does not exist" containerID="04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6"
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.421697 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6"} err="failed to get container status \"04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6\": rpc error: code = NotFound desc = could not find container \"04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6\": container with ID starting with 04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6 not found: ID does not exist"
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.421726 4721 scope.go:117] "RemoveContainer" containerID="5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b"
Dec 02 12:15:36 crc kubenswrapper[4721]: E1202 12:15:36.422204 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b\": container with ID starting with 5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b not found: ID does not exist" containerID="5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b"
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.422232 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b"} err="failed to get container status \"5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b\": rpc error: code = NotFound desc = could not find container \"5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b\": container with ID starting with 5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b not found: ID does not exist"
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.422254 4721 scope.go:117] "RemoveContainer" containerID="04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6"
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.422712 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6"} err="failed to get container status \"04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6\": rpc error: code = NotFound desc = could not find container \"04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6\": container with ID starting with 04cc0e17430433379e0bd990252034d7bac6c09eefea122ddccf31f211217bb6 not found: ID does not exist"
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.422757 4721 scope.go:117] "RemoveContainer" containerID="5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b"
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.423277 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b"} err="failed to get container status \"5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b\": rpc error: code = NotFound desc = could not find container \"5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b\": container with ID starting with 5780b5652d3766cbf002fd44a7dc675d7d0a23cff348f25498cb77e3277ccb8b not found: ID does not exist"
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.848366 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb4ffe44-922f-47be-8fb4-ff6646c76095" path="/var/lib/kubelet/pods/bb4ffe44-922f-47be-8fb4-ff6646c76095/volumes"
Dec 02 12:15:36 crc kubenswrapper[4721]: I1202 12:15:36.849440 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d20da2b0-30a8-4ab9-a299-2328cb57d79b" path="/var/lib/kubelet/pods/d20da2b0-30a8-4ab9-a299-2328cb57d79b/volumes"
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.683207 4721 generic.go:334] "Generic (PLEG): container finished" podID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerID="ed08fad96603a908329a4639c3da910532379455b2deed38d8c2bbb04078197e" exitCode=137
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.683293 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"ed08fad96603a908329a4639c3da910532379455b2deed38d8c2bbb04078197e"}
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.762148 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.895625 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vcl6c\" (UniqueName: \"kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-kube-api-access-vcl6c\") pod \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") "
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.895716 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") "
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.895758 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-lock\") pod \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") "
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.895793 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-cache\") pod \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") "
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.895819 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift\") pod \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\" (UID: \"bcc8bf37-541f-4278-9946-9f3bc6dd3f98\") "
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.896519 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-lock" (OuterVolumeSpecName: "lock") pod "bcc8bf37-541f-4278-9946-9f3bc6dd3f98" (UID: "bcc8bf37-541f-4278-9946-9f3bc6dd3f98"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.896859 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-cache" (OuterVolumeSpecName: "cache") pod "bcc8bf37-541f-4278-9946-9f3bc6dd3f98" (UID: "bcc8bf37-541f-4278-9946-9f3bc6dd3f98"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.897321 4721 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-lock\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.897534 4721 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-cache\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.902628 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-kube-api-access-vcl6c" (OuterVolumeSpecName: "kube-api-access-vcl6c") pod "bcc8bf37-541f-4278-9946-9f3bc6dd3f98" (UID: "bcc8bf37-541f-4278-9946-9f3bc6dd3f98"). InnerVolumeSpecName "kube-api-access-vcl6c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.902933 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "bcc8bf37-541f-4278-9946-9f3bc6dd3f98" (UID: "bcc8bf37-541f-4278-9946-9f3bc6dd3f98"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.903512 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "swift") pod "bcc8bf37-541f-4278-9946-9f3bc6dd3f98" (UID: "bcc8bf37-541f-4278-9946-9f3bc6dd3f98"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.998693 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vcl6c\" (UniqueName: \"kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-kube-api-access-vcl6c\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.998763 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" "
Dec 02 12:16:05 crc kubenswrapper[4721]: I1202 12:16:05.998780 4721 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bcc8bf37-541f-4278-9946-9f3bc6dd3f98-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.011506 4721 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.099884 4721 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.704795 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/swift-storage-0" event={"ID":"bcc8bf37-541f-4278-9946-9f3bc6dd3f98","Type":"ContainerDied","Data":"6ea1e5d1df8948e1c6bd4c926443b7c85b73d8a41991bcddbe0605ae1f7f123e"}
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.704864 4721 scope.go:117] "RemoveContainer" containerID="ed08fad96603a908329a4639c3da910532379455b2deed38d8c2bbb04078197e"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.705108 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/swift-storage-0"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.771375 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.771793 4721 scope.go:117] "RemoveContainer" containerID="d2623a652d84de904df19c60b58e60c9b42cddeb500d10591060dd57356f2193"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.777228 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/swift-storage-0"]
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.787649 4721 scope.go:117] "RemoveContainer" containerID="f817161f5bf9a3220d10cddfe03bbcd6e5d555181aaef609ae3adeaef6e3c32b"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.806212 4721 scope.go:117] "RemoveContainer" containerID="14ecca04ef80f5d8e4a2b80d00d273cc59bc814f0482fbf9808823d48e7ae959"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.820371 4721 scope.go:117] "RemoveContainer" containerID="d2e4f2bca78e144d7fa5d6d767cabbd15693828067c39546f5bae979f0418d17"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.838243 4721 scope.go:117] "RemoveContainer" containerID="ccdaba9ae4a5c692130e0b1de87c1d710fc7ff0c2ba046a205de73912232fc68"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.847707 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" path="/var/lib/kubelet/pods/bcc8bf37-541f-4278-9946-9f3bc6dd3f98/volumes"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.853630 4721 scope.go:117] "RemoveContainer" containerID="2fdda2495d6640d2b4b1dfdce1d73bbb926c1de60d3c954ca9973fe6915db465"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.868481 4721 scope.go:117] "RemoveContainer" containerID="686e55e98a419eebb1f93b8885821ca8114186d02cdfadea0ade536a31fbff72"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.888788 4721 scope.go:117] "RemoveContainer" containerID="7535c7c14e13a49abeb74a94a8bd1a4ec491f65e82bb7e153101b798d0a62735"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.908331 4721 scope.go:117] "RemoveContainer" containerID="85da596fb20638d2ee6c8e31bfab2b474609c2ca15b80b2b46485d52cb001e6c"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.924393 4721 scope.go:117] "RemoveContainer" containerID="dfce69c9fc79882225cfd59549e428a1811ccb52cd0bb19488c3aac8e332d397"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.938447 4721 scope.go:117] "RemoveContainer" containerID="a9b80e665586cd536e90d779913456d40abc1cf2319134a34a1e1f9b569d429a"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.951455 4721 scope.go:117] "RemoveContainer" containerID="e87d10dd8b8dab75bb555cf482e6d111f41d0d24dd12714cf5a7e0fb9b6c68dd"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.964993 4721 scope.go:117] "RemoveContainer" containerID="abd8319ab6735f6168875b0ddbc9759606251e471d269d41e0d09b48aeec277c"
Dec 02 12:16:06 crc kubenswrapper[4721]: I1202 12:16:06.977533 4721 scope.go:117] "RemoveContainer" containerID="49e1c87f8c375ff1765fe5bf980c00fc5eab00338c630665aa7cda483ae9774f"
Dec 02 12:16:13 crc kubenswrapper[4721]: I1202 12:16:13.391720 4721 scope.go:117] "RemoveContainer" containerID="593e4196f57767a499b514086441727d87211b11c9c65064e51d7ed545e8e456"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.399952 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-db-sync-h58xl"]
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.407412 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbican-db-sync-h58xl"]
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.443962 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/barbican958c-account-delete-77bvr"]
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444300 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-expirer"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444311 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-expirer"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444324 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-replicator"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444329 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-replicator"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444335 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-server"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444355 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-server"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444362 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-server"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444368 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-server"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444378 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d20da2b0-30a8-4ab9-a299-2328cb57d79b" containerName="swift-ring-rebalance"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444385 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d20da2b0-30a8-4ab9-a299-2328cb57d79b" containerName="swift-ring-rebalance"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444393 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="swift-recon-cron"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444399 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="swift-recon-cron"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444406 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-auditor"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444412 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-auditor"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444430 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb4ffe44-922f-47be-8fb4-ff6646c76095" containerName="proxy-httpd"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444436 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb4ffe44-922f-47be-8fb4-ff6646c76095" containerName="proxy-httpd"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444444 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-server"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444450 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-server"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444457 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-auditor"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444463 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-auditor"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444473 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-updater"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444479 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-updater"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444489 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-updater"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444501 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-updater"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444510 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-replicator"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444515 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-replicator"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444525 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-replicator"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444531 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-replicator"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444541 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="rsync"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444547 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="rsync"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444554 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb4ffe44-922f-47be-8fb4-ff6646c76095" containerName="proxy-server"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444559 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb4ffe44-922f-47be-8fb4-ff6646c76095" containerName="proxy-server"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444569 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-reaper"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444580 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-reaper"
Dec 02 12:16:14 crc kubenswrapper[4721]: E1202 12:16:14.444595 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-auditor"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444601 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-auditor"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444732 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-reaper"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444744 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb4ffe44-922f-47be-8fb4-ff6646c76095" containerName="proxy-server"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444754 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-replicator"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444761 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-auditor"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444770 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-expirer"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444780 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="d20da2b0-30a8-4ab9-a299-2328cb57d79b" containerName="swift-ring-rebalance"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444787 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-updater"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444797 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-server"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444805 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-server"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444811 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="container-updater"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444816 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-replicator"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444823 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="swift-recon-cron"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444832 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-auditor"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444838 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb4ffe44-922f-47be-8fb4-ff6646c76095" containerName="proxy-httpd"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444845 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-auditor"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444852 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="account-replicator"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444861 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="object-server"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.444869 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc8bf37-541f-4278-9946-9f3bc6dd3f98" containerName="rsync"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.445314 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican958c-account-delete-77bvr"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.471235 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican958c-account-delete-77bvr"]
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.482361 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-api-746f899bcb-ql4pn"]
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.482675 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" podUID="04a81817-b7ad-473c-8a11-a79c9e61bcb7" containerName="barbican-api-log" containerID="cri-o://8b04ec68c42841a803c8710dcf5afc63ab39aca55e3ac7fc8ad62b0c00eb33de" gracePeriod=30
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.482713 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" podUID="04a81817-b7ad-473c-8a11-a79c9e61bcb7" containerName="barbican-api" containerID="cri-o://749a001f205f44785b1aa789ce5ac84e7b7522c904eb4118466d2a1e1f306c0c" gracePeriod=30
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.494941 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625"]
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.495251 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" podUID="ca6ac23f-63bc-4601-abfd-d5a61402f51f" containerName="barbican-keystone-listener-log" containerID="cri-o://064adc3fb0a1656c8f95b1deafca3f5ead1d7812b36af11130552138add457fb" gracePeriod=30
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.495641 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" podUID="ca6ac23f-63bc-4601-abfd-d5a61402f51f" containerName="barbican-keystone-listener" containerID="cri-o://387d354c93317bbbf2bf2aeaf2d880a6c8ec047947c845b69c5d348daef62faf" gracePeriod=30
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.504848 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-worker-6d756b6777-n645z"]
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.505063 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" podUID="d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5" containerName="barbican-worker-log" containerID="cri-o://e7754533dd8cefe98d4994cc7e37962d8eaa78a097f114de5bc05d6950b71dd7" gracePeriod=30
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.505324 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" podUID="d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5" containerName="barbican-worker" containerID="cri-o://2a434ede44dd08007f5fdd84c8c299dea5d11d6a5e16cacc130ea92cfb2ed432" gracePeriod=30
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.527045 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55ef2351-e56c-4a46-8350-1ed9f1dae8c7-operator-scripts\") pod \"barbican958c-account-delete-77bvr\" (UID: \"55ef2351-e56c-4a46-8350-1ed9f1dae8c7\") " pod="swift-kuttl-tests/barbican958c-account-delete-77bvr"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.527146 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5lkd\" (UniqueName: \"kubernetes.io/projected/55ef2351-e56c-4a46-8350-1ed9f1dae8c7-kube-api-access-c5lkd\") pod \"barbican958c-account-delete-77bvr\" (UID: \"55ef2351-e56c-4a46-8350-1ed9f1dae8c7\") " pod="swift-kuttl-tests/barbican958c-account-delete-77bvr"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.628229 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55ef2351-e56c-4a46-8350-1ed9f1dae8c7-operator-scripts\") pod \"barbican958c-account-delete-77bvr\" (UID: \"55ef2351-e56c-4a46-8350-1ed9f1dae8c7\") " pod="swift-kuttl-tests/barbican958c-account-delete-77bvr"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.628308 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5lkd\" (UniqueName: \"kubernetes.io/projected/55ef2351-e56c-4a46-8350-1ed9f1dae8c7-kube-api-access-c5lkd\") pod \"barbican958c-account-delete-77bvr\" (UID: \"55ef2351-e56c-4a46-8350-1ed9f1dae8c7\") " pod="swift-kuttl-tests/barbican958c-account-delete-77bvr"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.629454 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55ef2351-e56c-4a46-8350-1ed9f1dae8c7-operator-scripts\") pod \"barbican958c-account-delete-77bvr\" (UID: \"55ef2351-e56c-4a46-8350-1ed9f1dae8c7\") " pod="swift-kuttl-tests/barbican958c-account-delete-77bvr"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.646776 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5lkd\" (UniqueName: \"kubernetes.io/projected/55ef2351-e56c-4a46-8350-1ed9f1dae8c7-kube-api-access-c5lkd\") pod \"barbican958c-account-delete-77bvr\" (UID: \"55ef2351-e56c-4a46-8350-1ed9f1dae8c7\") " pod="swift-kuttl-tests/barbican958c-account-delete-77bvr"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.770385 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican958c-account-delete-77bvr"
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.783450 4721 generic.go:334] "Generic (PLEG): container finished" podID="04a81817-b7ad-473c-8a11-a79c9e61bcb7" containerID="8b04ec68c42841a803c8710dcf5afc63ab39aca55e3ac7fc8ad62b0c00eb33de" exitCode=143
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.783600 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" event={"ID":"04a81817-b7ad-473c-8a11-a79c9e61bcb7","Type":"ContainerDied","Data":"8b04ec68c42841a803c8710dcf5afc63ab39aca55e3ac7fc8ad62b0c00eb33de"}
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.785963 4721 generic.go:334] "Generic (PLEG): container finished" podID="ca6ac23f-63bc-4601-abfd-d5a61402f51f" containerID="064adc3fb0a1656c8f95b1deafca3f5ead1d7812b36af11130552138add457fb" exitCode=143
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.786024 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" event={"ID":"ca6ac23f-63bc-4601-abfd-d5a61402f51f","Type":"ContainerDied","Data":"064adc3fb0a1656c8f95b1deafca3f5ead1d7812b36af11130552138add457fb"}
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.789244 4721 generic.go:334] "Generic (PLEG): container finished" podID="d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5" containerID="e7754533dd8cefe98d4994cc7e37962d8eaa78a097f114de5bc05d6950b71dd7" exitCode=143
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.789283 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" event={"ID":"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5","Type":"ContainerDied","Data":"e7754533dd8cefe98d4994cc7e37962d8eaa78a097f114de5bc05d6950b71dd7"}
Dec 02 12:16:14 crc kubenswrapper[4721]: I1202 12:16:14.850711 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="911bdab8-6e80-4fb4-8cde-4e5702b37fa2" path="/var/lib/kubelet/pods/911bdab8-6e80-4fb4-8cde-4e5702b37fa2/volumes"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.218586 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/barbican958c-account-delete-77bvr"]
Dec 02 12:16:15 crc kubenswrapper[4721]: W1202 12:16:15.261954 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55ef2351_e56c_4a46_8350_1ed9f1dae8c7.slice/crio-67f3deec8049a055858da7164c6f3622fb6858b033002a92078c3ce1e54e8c81 WatchSource:0}: Error finding container 67f3deec8049a055858da7164c6f3622fb6858b033002a92078c3ce1e54e8c81: Status 404 returned error can't find the container with id 67f3deec8049a055858da7164c6f3622fb6858b033002a92078c3ce1e54e8c81
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.402496 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.447111 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dq86b\" (UniqueName: \"kubernetes.io/projected/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-kube-api-access-dq86b\") pod \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") "
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.447258 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-config-data-custom\") pod \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") "
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.447313 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-config-data\") pod \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") "
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.447427 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-logs\") pod \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\" (UID: \"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5\") "
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.448871 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-logs" (OuterVolumeSpecName: "logs") pod "d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5" (UID: "d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.459588 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5" (UID: "d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.464161 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-kube-api-access-dq86b" (OuterVolumeSpecName: "kube-api-access-dq86b") pod "d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5" (UID: "d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5"). InnerVolumeSpecName "kube-api-access-dq86b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.494090 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-config-data" (OuterVolumeSpecName: "config-data") pod "d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5" (UID: "d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.549938 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.550629 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.550639 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-logs\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.550649 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dq86b\" (UniqueName: \"kubernetes.io/projected/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5-kube-api-access-dq86b\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.595452 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-db-sync-r2n9m"]
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.607523 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone-db-sync-r2n9m"]
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.610924 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-78b88b8749-htdrf"]
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.611158 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" podUID="605042d5-6e07-4037-8a8c-0ca3da0d2ac3" containerName="keystone-api" containerID="cri-o://e919556c6b839b7be54606fc02688391443d37428cca6d7e107bca50c848a403" gracePeriod=30
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.620480 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-bootstrap-sbkb7"]
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.633306 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone-bootstrap-sbkb7"]
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.672069 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["swift-kuttl-tests/keystone1088-account-delete-vlzc9"]
Dec 02 12:16:15 crc kubenswrapper[4721]: E1202 12:16:15.672471 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5" containerName="barbican-worker"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.672486 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5" containerName="barbican-worker"
Dec 02 12:16:15 crc kubenswrapper[4721]: E1202 12:16:15.672515 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5" containerName="barbican-worker-log"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.672522 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5" containerName="barbican-worker-log"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.672648 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5" containerName="barbican-worker-log"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.672661 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5" containerName="barbican-worker"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.673118 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.678274 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone1088-account-delete-vlzc9"]
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.753722 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9h7s5\" (UniqueName: \"kubernetes.io/projected/7349d6cc-58af-4d67-a865-31d8409c29cd-kube-api-access-9h7s5\") pod \"keystone1088-account-delete-vlzc9\" (UID: \"7349d6cc-58af-4d67-a865-31d8409c29cd\") " pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.753783 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts\") pod \"keystone1088-account-delete-vlzc9\" (UID: \"7349d6cc-58af-4d67-a865-31d8409c29cd\") " pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.797371 4721 generic.go:334] "Generic (PLEG): container finished" podID="ca6ac23f-63bc-4601-abfd-d5a61402f51f" containerID="387d354c93317bbbf2bf2aeaf2d880a6c8ec047947c845b69c5d348daef62faf" exitCode=0
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.797422 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" event={"ID":"ca6ac23f-63bc-4601-abfd-d5a61402f51f","Type":"ContainerDied","Data":"387d354c93317bbbf2bf2aeaf2d880a6c8ec047947c845b69c5d348daef62faf"}
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.798922 4721 generic.go:334] "Generic (PLEG): container finished" podID="d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5" containerID="2a434ede44dd08007f5fdd84c8c299dea5d11d6a5e16cacc130ea92cfb2ed432" exitCode=0
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.798960 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" event={"ID":"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5","Type":"ContainerDied","Data":"2a434ede44dd08007f5fdd84c8c299dea5d11d6a5e16cacc130ea92cfb2ed432"}
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.798975 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z" event={"ID":"d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5","Type":"ContainerDied","Data":"1b18c094746ff3573472ba57776c5acd08ee68c465d4e1740cfd98d1b970c18f"}
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.798990 4721 scope.go:117] "RemoveContainer" containerID="2a434ede44dd08007f5fdd84c8c299dea5d11d6a5e16cacc130ea92cfb2ed432"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.799096 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-worker-6d756b6777-n645z"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.802154 4721 generic.go:334] "Generic (PLEG): container finished" podID="55ef2351-e56c-4a46-8350-1ed9f1dae8c7" containerID="3478b9da40dbe812afcf1a71b6fe29bbb14d1c384071998f05bcf6875780e41f" exitCode=0
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.802210 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican958c-account-delete-77bvr" event={"ID":"55ef2351-e56c-4a46-8350-1ed9f1dae8c7","Type":"ContainerDied","Data":"3478b9da40dbe812afcf1a71b6fe29bbb14d1c384071998f05bcf6875780e41f"}
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.802804 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican958c-account-delete-77bvr" event={"ID":"55ef2351-e56c-4a46-8350-1ed9f1dae8c7","Type":"ContainerStarted","Data":"67f3deec8049a055858da7164c6f3622fb6858b033002a92078c3ce1e54e8c81"}
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.826004 4721 scope.go:117] "RemoveContainer" containerID="e7754533dd8cefe98d4994cc7e37962d8eaa78a097f114de5bc05d6950b71dd7"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.833063 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-worker-6d756b6777-n645z"]
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.837512 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbican-worker-6d756b6777-n645z"]
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.848546 4721 scope.go:117] "RemoveContainer" containerID="2a434ede44dd08007f5fdd84c8c299dea5d11d6a5e16cacc130ea92cfb2ed432"
Dec 02 12:16:15 crc kubenswrapper[4721]: E1202 12:16:15.849288 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a434ede44dd08007f5fdd84c8c299dea5d11d6a5e16cacc130ea92cfb2ed432\": container with ID starting with 2a434ede44dd08007f5fdd84c8c299dea5d11d6a5e16cacc130ea92cfb2ed432 not found: ID does not exist" containerID="2a434ede44dd08007f5fdd84c8c299dea5d11d6a5e16cacc130ea92cfb2ed432"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.849359 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a434ede44dd08007f5fdd84c8c299dea5d11d6a5e16cacc130ea92cfb2ed432"} err="failed to get container status \"2a434ede44dd08007f5fdd84c8c299dea5d11d6a5e16cacc130ea92cfb2ed432\": rpc error: code = NotFound desc = could not find container \"2a434ede44dd08007f5fdd84c8c299dea5d11d6a5e16cacc130ea92cfb2ed432\": container with ID starting with 2a434ede44dd08007f5fdd84c8c299dea5d11d6a5e16cacc130ea92cfb2ed432 not found: ID does not exist"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.849392 4721 scope.go:117] "RemoveContainer" containerID="e7754533dd8cefe98d4994cc7e37962d8eaa78a097f114de5bc05d6950b71dd7"
Dec 02 12:16:15 crc kubenswrapper[4721]: E1202 12:16:15.849951 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7754533dd8cefe98d4994cc7e37962d8eaa78a097f114de5bc05d6950b71dd7\": container with ID starting with e7754533dd8cefe98d4994cc7e37962d8eaa78a097f114de5bc05d6950b71dd7 not found: ID does not exist" containerID="e7754533dd8cefe98d4994cc7e37962d8eaa78a097f114de5bc05d6950b71dd7"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.850012 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7754533dd8cefe98d4994cc7e37962d8eaa78a097f114de5bc05d6950b71dd7"} err="failed to get container status \"e7754533dd8cefe98d4994cc7e37962d8eaa78a097f114de5bc05d6950b71dd7\": rpc error: code = NotFound desc = could not find container \"e7754533dd8cefe98d4994cc7e37962d8eaa78a097f114de5bc05d6950b71dd7\": container with ID starting with e7754533dd8cefe98d4994cc7e37962d8eaa78a097f114de5bc05d6950b71dd7 not found: ID does not exist"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.855149 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9h7s5\" (UniqueName: \"kubernetes.io/projected/7349d6cc-58af-4d67-a865-31d8409c29cd-kube-api-access-9h7s5\") pod \"keystone1088-account-delete-vlzc9\" (UID: \"7349d6cc-58af-4d67-a865-31d8409c29cd\") " pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.855213 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts\") pod \"keystone1088-account-delete-vlzc9\" (UID: \"7349d6cc-58af-4d67-a865-31d8409c29cd\") " pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.856568 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts\") pod \"keystone1088-account-delete-vlzc9\" (UID: \"7349d6cc-58af-4d67-a865-31d8409c29cd\") " pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.873996 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9h7s5\" (UniqueName: \"kubernetes.io/projected/7349d6cc-58af-4d67-a865-31d8409c29cd-kube-api-access-9h7s5\") pod \"keystone1088-account-delete-vlzc9\" (UID: \"7349d6cc-58af-4d67-a865-31d8409c29cd\") " pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9"
Dec 02 12:16:15 crc kubenswrapper[4721]: I1202 12:16:15.924910 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625"
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.057543 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca6ac23f-63bc-4601-abfd-d5a61402f51f-logs\") pod \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") "
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.057616 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca6ac23f-63bc-4601-abfd-d5a61402f51f-config-data-custom\") pod \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") "
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.057650 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca6ac23f-63bc-4601-abfd-d5a61402f51f-config-data\") pod \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") "
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.057753 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4ghc\" (UniqueName: \"kubernetes.io/projected/ca6ac23f-63bc-4601-abfd-d5a61402f51f-kube-api-access-f4ghc\") pod \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\" (UID: \"ca6ac23f-63bc-4601-abfd-d5a61402f51f\") "
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.058460 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca6ac23f-63bc-4601-abfd-d5a61402f51f-logs" (OuterVolumeSpecName: "logs") pod "ca6ac23f-63bc-4601-abfd-d5a61402f51f" (UID: "ca6ac23f-63bc-4601-abfd-d5a61402f51f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.063563 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca6ac23f-63bc-4601-abfd-d5a61402f51f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ca6ac23f-63bc-4601-abfd-d5a61402f51f" (UID: "ca6ac23f-63bc-4601-abfd-d5a61402f51f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.064270 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca6ac23f-63bc-4601-abfd-d5a61402f51f-kube-api-access-f4ghc" (OuterVolumeSpecName: "kube-api-access-f4ghc") pod "ca6ac23f-63bc-4601-abfd-d5a61402f51f" (UID: "ca6ac23f-63bc-4601-abfd-d5a61402f51f"). InnerVolumeSpecName "kube-api-access-f4ghc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.089849 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca6ac23f-63bc-4601-abfd-d5a61402f51f-config-data" (OuterVolumeSpecName: "config-data") pod "ca6ac23f-63bc-4601-abfd-d5a61402f51f" (UID: "ca6ac23f-63bc-4601-abfd-d5a61402f51f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.094148 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9"
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.159481 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4ghc\" (UniqueName: \"kubernetes.io/projected/ca6ac23f-63bc-4601-abfd-d5a61402f51f-kube-api-access-f4ghc\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.159518 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca6ac23f-63bc-4601-abfd-d5a61402f51f-logs\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.159528 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca6ac23f-63bc-4601-abfd-d5a61402f51f-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.159537 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca6ac23f-63bc-4601-abfd-d5a61402f51f-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.415606 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/openstack-galera-1"]
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.421534 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/openstack-galera-0"]
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.426386 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/openstack-galera-2"]
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.501626 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/keystone1088-account-delete-vlzc9"]
Dec 02 12:16:16 crc kubenswrapper[4721]: W1202 12:16:16.510616 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7349d6cc_58af_4d67_a865_31d8409c29cd.slice/crio-9e23c88cd28bae65f7c7fcc8d6e481288c76aa71539a007e92ce51639af9d790 WatchSource:0}: Error finding container 9e23c88cd28bae65f7c7fcc8d6e481288c76aa71539a007e92ce51639af9d790: Status 404 returned error can't find the container with id 9e23c88cd28bae65f7c7fcc8d6e481288c76aa71539a007e92ce51639af9d790
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.567190 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/openstack-galera-2" podUID="69d9011a-8441-4afa-904b-706f5a0ffe6c" containerName="galera" containerID="cri-o://e5f72d9edc92b63386400f754f50f6b93a0bbde29c05804cef4803633ed756bd" gracePeriod=30
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.809370 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625" event={"ID":"ca6ac23f-63bc-4601-abfd-d5a61402f51f","Type":"ContainerDied","Data":"a0deff91528ede51a37da0888bc6d3fe7c45e0f84c991b9a1835f4fabdd7190e"}
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.809412 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625"
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.809449 4721 scope.go:117] "RemoveContainer" containerID="387d354c93317bbbf2bf2aeaf2d880a6c8ec047947c845b69c5d348daef62faf"
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.812143 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9" event={"ID":"7349d6cc-58af-4d67-a865-31d8409c29cd","Type":"ContainerStarted","Data":"cbdd31746d6f20e86a32ba99fee61c3e9dadc5f76ccd0ff940a41d5055c5aa11"}
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.812251 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9" event={"ID":"7349d6cc-58af-4d67-a865-31d8409c29cd","Type":"ContainerStarted","Data":"9e23c88cd28bae65f7c7fcc8d6e481288c76aa71539a007e92ce51639af9d790"}
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.812650 4721 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9" secret="" err="secret \"galera-openstack-dockercfg-p5jb4\" not found"
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.828564 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9" podStartSLOduration=1.828543089 podStartE2EDuration="1.828543089s" podCreationTimestamp="2025-12-02 12:16:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:16:16.823804853 +0000 UTC m=+1506.562355202" watchObservedRunningTime="2025-12-02 12:16:16.828543089 +0000 UTC m=+1506.567093448"
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.835873 4721 scope.go:117] "RemoveContainer" containerID="064adc3fb0a1656c8f95b1deafca3f5ead1d7812b36af11130552138add457fb"
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.855003 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbe54636-ef0a-4d0d-bcbd-1990e8433d04" path="/var/lib/kubelet/pods/cbe54636-ef0a-4d0d-bcbd-1990e8433d04/volumes"
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.856156 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5" path="/var/lib/kubelet/pods/d8ddc2f8-ddc4-4730-9b03-f81a3eece4a5/volumes"
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.857092 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc273c30-90d7-4b50-bf60-0e3292eed557" path="/var/lib/kubelet/pods/fc273c30-90d7-4b50-bf60-0e3292eed557/volumes"
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.857775 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625"]
Dec 02 12:16:16 crc kubenswrapper[4721]: I1202 12:16:16.857811 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbican-keystone-listener-69fbfdbb48-s7625"]
Dec 02 12:16:16 crc kubenswrapper[4721]: E1202 12:16:16.869127 4721 configmap.go:193] Couldn't get configMap swift-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found
Dec 02 12:16:16 crc kubenswrapper[4721]: E1202 12:16:16.869202 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts podName:7349d6cc-58af-4d67-a865-31d8409c29cd nodeName:}" failed.
No retries permitted until 2025-12-02 12:16:17.369182671 +0000 UTC m=+1507.107733020 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts") pod "keystone1088-account-delete-vlzc9" (UID: "7349d6cc-58af-4d67-a865-31d8409c29cd") : configmap "openstack-scripts" not found Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.072716 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican958c-account-delete-77bvr" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.079847 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/memcached-0"] Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.080096 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/memcached-0" podUID="5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93" containerName="memcached" containerID="cri-o://fdf853adc3af7a138953946a1a769d8a059b52b64d216157b4d6da2c575ce21a" gracePeriod=30 Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.172630 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5lkd\" (UniqueName: \"kubernetes.io/projected/55ef2351-e56c-4a46-8350-1ed9f1dae8c7-kube-api-access-c5lkd\") pod \"55ef2351-e56c-4a46-8350-1ed9f1dae8c7\" (UID: \"55ef2351-e56c-4a46-8350-1ed9f1dae8c7\") " Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.172783 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55ef2351-e56c-4a46-8350-1ed9f1dae8c7-operator-scripts\") pod \"55ef2351-e56c-4a46-8350-1ed9f1dae8c7\" (UID: \"55ef2351-e56c-4a46-8350-1ed9f1dae8c7\") " Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.173403 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55ef2351-e56c-4a46-8350-1ed9f1dae8c7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "55ef2351-e56c-4a46-8350-1ed9f1dae8c7" (UID: "55ef2351-e56c-4a46-8350-1ed9f1dae8c7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.177722 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55ef2351-e56c-4a46-8350-1ed9f1dae8c7-kube-api-access-c5lkd" (OuterVolumeSpecName: "kube-api-access-c5lkd") pod "55ef2351-e56c-4a46-8350-1ed9f1dae8c7" (UID: "55ef2351-e56c-4a46-8350-1ed9f1dae8c7"). InnerVolumeSpecName "kube-api-access-c5lkd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.274665 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55ef2351-e56c-4a46-8350-1ed9f1dae8c7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.274694 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5lkd\" (UniqueName: \"kubernetes.io/projected/55ef2351-e56c-4a46-8350-1ed9f1dae8c7-kube-api-access-c5lkd\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:17 crc kubenswrapper[4721]: E1202 12:16:17.376654 4721 configmap.go:193] Couldn't get configMap swift-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Dec 02 12:16:17 crc kubenswrapper[4721]: E1202 12:16:17.376751 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts podName:7349d6cc-58af-4d67-a865-31d8409c29cd nodeName:}" failed. No retries permitted until 2025-12-02 12:16:18.376728039 +0000 UTC m=+1508.115278418 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts") pod "keystone1088-account-delete-vlzc9" (UID: "7349d6cc-58af-4d67-a865-31d8409c29cd") : configmap "openstack-scripts" not found Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.421139 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.477741 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrkb8\" (UniqueName: \"kubernetes.io/projected/69d9011a-8441-4afa-904b-706f5a0ffe6c-kube-api-access-jrkb8\") pod \"69d9011a-8441-4afa-904b-706f5a0ffe6c\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.477804 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"69d9011a-8441-4afa-904b-706f5a0ffe6c\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.477862 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-config-data-default\") pod \"69d9011a-8441-4afa-904b-706f5a0ffe6c\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.477935 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-operator-scripts\") pod \"69d9011a-8441-4afa-904b-706f5a0ffe6c\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.477965 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-kolla-config\") pod \"69d9011a-8441-4afa-904b-706f5a0ffe6c\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.478046 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/69d9011a-8441-4afa-904b-706f5a0ffe6c-config-data-generated\") pod \"69d9011a-8441-4afa-904b-706f5a0ffe6c\" (UID: \"69d9011a-8441-4afa-904b-706f5a0ffe6c\") " Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.478499 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "69d9011a-8441-4afa-904b-706f5a0ffe6c" (UID: "69d9011a-8441-4afa-904b-706f5a0ffe6c"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.478515 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69d9011a-8441-4afa-904b-706f5a0ffe6c-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "69d9011a-8441-4afa-904b-706f5a0ffe6c" (UID: "69d9011a-8441-4afa-904b-706f5a0ffe6c"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.478559 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "69d9011a-8441-4afa-904b-706f5a0ffe6c" (UID: "69d9011a-8441-4afa-904b-706f5a0ffe6c"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.478725 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "69d9011a-8441-4afa-904b-706f5a0ffe6c" (UID: "69d9011a-8441-4afa-904b-706f5a0ffe6c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.486188 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69d9011a-8441-4afa-904b-706f5a0ffe6c-kube-api-access-jrkb8" (OuterVolumeSpecName: "kube-api-access-jrkb8") pod "69d9011a-8441-4afa-904b-706f5a0ffe6c" (UID: "69d9011a-8441-4afa-904b-706f5a0ffe6c"). InnerVolumeSpecName "kube-api-access-jrkb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.486586 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"] Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.487680 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "mysql-db") pod "69d9011a-8441-4afa-904b-706f5a0ffe6c" (UID: "69d9011a-8441-4afa-904b-706f5a0ffe6c"). InnerVolumeSpecName "local-storage09-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.580331 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/69d9011a-8441-4afa-904b-706f5a0ffe6c-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.580494 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrkb8\" (UniqueName: \"kubernetes.io/projected/69d9011a-8441-4afa-904b-706f5a0ffe6c-kube-api-access-jrkb8\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.580543 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.580567 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.580581 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.580592 4721 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/69d9011a-8441-4afa-904b-706f5a0ffe6c-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.601357 4721 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.645841 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" podUID="04a81817-b7ad-473c-8a11-a79c9e61bcb7" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.94:9311/healthcheck\": read tcp 10.217.0.2:52306->10.217.0.94:9311: read: connection reset by peer" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.646174 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" podUID="04a81817-b7ad-473c-8a11-a79c9e61bcb7" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.94:9311/healthcheck\": read tcp 10.217.0.2:52322->10.217.0.94:9311: read: connection reset by peer" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.681907 4721 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.822657 4721 generic.go:334] "Generic (PLEG): container finished" podID="69d9011a-8441-4afa-904b-706f5a0ffe6c" containerID="e5f72d9edc92b63386400f754f50f6b93a0bbde29c05804cef4803633ed756bd" exitCode=0 Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.822723 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"69d9011a-8441-4afa-904b-706f5a0ffe6c","Type":"ContainerDied","Data":"e5f72d9edc92b63386400f754f50f6b93a0bbde29c05804cef4803633ed756bd"} Dec 02 12:16:17 
crc kubenswrapper[4721]: I1202 12:16:17.822754 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-2" event={"ID":"69d9011a-8441-4afa-904b-706f5a0ffe6c","Type":"ContainerDied","Data":"0079ead3d7edd32fe163cd52005d8e9b967f39cc1cc8621968264f8065c39bf3"} Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.822775 4721 scope.go:117] "RemoveContainer" containerID="e5f72d9edc92b63386400f754f50f6b93a0bbde29c05804cef4803633ed756bd" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.822929 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-2" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.861370 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican958c-account-delete-77bvr" event={"ID":"55ef2351-e56c-4a46-8350-1ed9f1dae8c7","Type":"ContainerDied","Data":"67f3deec8049a055858da7164c6f3622fb6858b033002a92078c3ce1e54e8c81"} Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.861389 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican958c-account-delete-77bvr" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.861407 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67f3deec8049a055858da7164c6f3622fb6858b033002a92078c3ce1e54e8c81" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.865495 4721 generic.go:334] "Generic (PLEG): container finished" podID="04a81817-b7ad-473c-8a11-a79c9e61bcb7" containerID="749a001f205f44785b1aa789ce5ac84e7b7522c904eb4118466d2a1e1f306c0c" exitCode=0 Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.865550 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" event={"ID":"04a81817-b7ad-473c-8a11-a79c9e61bcb7","Type":"ContainerDied","Data":"749a001f205f44785b1aa789ce5ac84e7b7522c904eb4118466d2a1e1f306c0c"} Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.868484 4721 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9" secret="" err="secret \"galera-openstack-dockercfg-p5jb4\" not found" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.868964 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"] Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.877412 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/openstack-galera-2"] Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.879714 4721 scope.go:117] "RemoveContainer" containerID="312c29e2155e60a4937c7648fbbb3c5e46447fe6928cde19289282af63e58102" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.886996 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/openstack-galera-2"] Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.895616 4721 scope.go:117] "RemoveContainer" containerID="e5f72d9edc92b63386400f754f50f6b93a0bbde29c05804cef4803633ed756bd" Dec 02 12:16:17 crc kubenswrapper[4721]: E1202 12:16:17.895891 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5f72d9edc92b63386400f754f50f6b93a0bbde29c05804cef4803633ed756bd\": container with ID starting with e5f72d9edc92b63386400f754f50f6b93a0bbde29c05804cef4803633ed756bd not found: ID does not exist" containerID="e5f72d9edc92b63386400f754f50f6b93a0bbde29c05804cef4803633ed756bd" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.895927 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5f72d9edc92b63386400f754f50f6b93a0bbde29c05804cef4803633ed756bd"} err="failed to get container status \"e5f72d9edc92b63386400f754f50f6b93a0bbde29c05804cef4803633ed756bd\": rpc error: code = NotFound desc = could not find container \"e5f72d9edc92b63386400f754f50f6b93a0bbde29c05804cef4803633ed756bd\": container with ID starting with e5f72d9edc92b63386400f754f50f6b93a0bbde29c05804cef4803633ed756bd not found: ID does not exist" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.895946 4721 scope.go:117] "RemoveContainer" containerID="312c29e2155e60a4937c7648fbbb3c5e46447fe6928cde19289282af63e58102" Dec 02 12:16:17 crc kubenswrapper[4721]: E1202 12:16:17.896299 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"312c29e2155e60a4937c7648fbbb3c5e46447fe6928cde19289282af63e58102\": container with ID starting with 312c29e2155e60a4937c7648fbbb3c5e46447fe6928cde19289282af63e58102 not found: ID does not exist" containerID="312c29e2155e60a4937c7648fbbb3c5e46447fe6928cde19289282af63e58102" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.896313 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"312c29e2155e60a4937c7648fbbb3c5e46447fe6928cde19289282af63e58102"} err="failed to get container status \"312c29e2155e60a4937c7648fbbb3c5e46447fe6928cde19289282af63e58102\": rpc error: code = NotFound desc = could not find container \"312c29e2155e60a4937c7648fbbb3c5e46447fe6928cde19289282af63e58102\": container with ID starting with 312c29e2155e60a4937c7648fbbb3c5e46447fe6928cde19289282af63e58102 not found: ID does not exist" Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.910006 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/rabbitmq-server-0" podUID="89920af2-3af7-4428-80fa-ddf2ba97dfb9" containerName="rabbitmq" 
containerID="cri-o://3cc5505dcaf4c8eed98a80d0bcc290be4d06c7a19329d3cb28c29862fb1f1188" gracePeriod=604800 Dec 02 12:16:17 crc kubenswrapper[4721]: I1202 12:16:17.978839 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.085442 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04a81817-b7ad-473c-8a11-a79c9e61bcb7-config-data\") pod \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.085521 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04a81817-b7ad-473c-8a11-a79c9e61bcb7-logs\") pod \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.085555 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjghn\" (UniqueName: \"kubernetes.io/projected/04a81817-b7ad-473c-8a11-a79c9e61bcb7-kube-api-access-mjghn\") pod \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.085661 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/04a81817-b7ad-473c-8a11-a79c9e61bcb7-config-data-custom\") pod \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\" (UID: \"04a81817-b7ad-473c-8a11-a79c9e61bcb7\") " Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.086452 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04a81817-b7ad-473c-8a11-a79c9e61bcb7-logs" (OuterVolumeSpecName: "logs") pod "04a81817-b7ad-473c-8a11-a79c9e61bcb7" (UID: "04a81817-b7ad-473c-8a11-a79c9e61bcb7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.089817 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04a81817-b7ad-473c-8a11-a79c9e61bcb7-kube-api-access-mjghn" (OuterVolumeSpecName: "kube-api-access-mjghn") pod "04a81817-b7ad-473c-8a11-a79c9e61bcb7" (UID: "04a81817-b7ad-473c-8a11-a79c9e61bcb7"). InnerVolumeSpecName "kube-api-access-mjghn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.090583 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04a81817-b7ad-473c-8a11-a79c9e61bcb7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "04a81817-b7ad-473c-8a11-a79c9e61bcb7" (UID: "04a81817-b7ad-473c-8a11-a79c9e61bcb7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.146550 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04a81817-b7ad-473c-8a11-a79c9e61bcb7-config-data" (OuterVolumeSpecName: "config-data") pod "04a81817-b7ad-473c-8a11-a79c9e61bcb7" (UID: "04a81817-b7ad-473c-8a11-a79c9e61bcb7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.192107 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04a81817-b7ad-473c-8a11-a79c9e61bcb7-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.192153 4721 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04a81817-b7ad-473c-8a11-a79c9e61bcb7-logs\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.192161 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjghn\" (UniqueName: \"kubernetes.io/projected/04a81817-b7ad-473c-8a11-a79c9e61bcb7-kube-api-access-mjghn\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.192171 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/04a81817-b7ad-473c-8a11-a79c9e61bcb7-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:18 crc kubenswrapper[4721]: E1202 12:16:18.395281 4721 configmap.go:193] Couldn't get configMap swift-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Dec 02 12:16:18 crc kubenswrapper[4721]: E1202 12:16:18.395379 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts podName:7349d6cc-58af-4d67-a865-31d8409c29cd nodeName:}" failed. No retries permitted until 2025-12-02 12:16:20.395361014 +0000 UTC m=+1510.133911363 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts") pod "keystone1088-account-delete-vlzc9" (UID: "7349d6cc-58af-4d67-a865-31d8409c29cd") : configmap "openstack-scripts" not found Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.568833 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/openstack-galera-1" podUID="91d5a64b-5c90-4fba-9e6c-416071e412b3" containerName="galera" containerID="cri-o://1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09" gracePeriod=28 Dec 02 12:16:18 crc kubenswrapper[4721]: E1202 12:16:18.801005 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 02 12:16:18 crc kubenswrapper[4721]: E1202 12:16:18.802742 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 02 12:16:18 crc kubenswrapper[4721]: E1202 12:16:18.803932 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09" 
cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 02 12:16:18 crc kubenswrapper[4721]: E1202 12:16:18.803975 4721 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="swift-kuttl-tests/openstack-galera-1" podUID="91d5a64b-5c90-4fba-9e6c-416071e412b3" containerName="galera" Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.848379 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69d9011a-8441-4afa-904b-706f5a0ffe6c" path="/var/lib/kubelet/pods/69d9011a-8441-4afa-904b-706f5a0ffe6c/volumes" Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.849337 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca6ac23f-63bc-4601-abfd-d5a61402f51f" path="/var/lib/kubelet/pods/ca6ac23f-63bc-4601-abfd-d5a61402f51f/volumes" Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.875038 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.875036 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/barbican-api-746f899bcb-ql4pn" event={"ID":"04a81817-b7ad-473c-8a11-a79c9e61bcb7","Type":"ContainerDied","Data":"9ad16c65327cbc3cef461309012a20f8d647a6b8db3bfa04f117bb78da6dca5e"} Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.875400 4721 scope.go:117] "RemoveContainer" containerID="749a001f205f44785b1aa789ce5ac84e7b7522c904eb4118466d2a1e1f306c0c" Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.881734 4721 generic.go:334] "Generic (PLEG): container finished" podID="605042d5-6e07-4037-8a8c-0ca3da0d2ac3" containerID="e919556c6b839b7be54606fc02688391443d37428cca6d7e107bca50c848a403" exitCode=0 Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.881776 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" event={"ID":"605042d5-6e07-4037-8a8c-0ca3da0d2ac3","Type":"ContainerDied","Data":"e919556c6b839b7be54606fc02688391443d37428cca6d7e107bca50c848a403"} Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.895684 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-api-746f899bcb-ql4pn"] Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.896642 4721 scope.go:117] "RemoveContainer" containerID="8b04ec68c42841a803c8710dcf5afc63ab39aca55e3ac7fc8ad62b0c00eb33de" Dec 02 12:16:18 crc kubenswrapper[4721]: I1202 12:16:18.904252 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbican-api-746f899bcb-ql4pn"] Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.171356 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.237423 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/memcached-0" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.305805 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-kolla-config\") pod \"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93\" (UID: \"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.305853 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-config-data\") pod \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.305896 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rkqz\" (UniqueName: \"kubernetes.io/projected/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-kube-api-access-2rkqz\") pod \"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93\" (UID: \"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.305918 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbqfs\" (UniqueName: \"kubernetes.io/projected/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-kube-api-access-nbqfs\") pod \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.305949 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-config-data\") pod \"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93\" (UID: \"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.305967 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-credential-keys\") pod \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.306023 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-fernet-keys\") pod \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.306046 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-scripts\") pod \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\" (UID: \"605042d5-6e07-4037-8a8c-0ca3da0d2ac3\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.307221 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-config-data" (OuterVolumeSpecName: "config-data") pod "5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93" (UID: "5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.307311 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93" (UID: "5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.311225 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "605042d5-6e07-4037-8a8c-0ca3da0d2ac3" (UID: "605042d5-6e07-4037-8a8c-0ca3da0d2ac3"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.311497 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-kube-api-access-nbqfs" (OuterVolumeSpecName: "kube-api-access-nbqfs") pod "605042d5-6e07-4037-8a8c-0ca3da0d2ac3" (UID: "605042d5-6e07-4037-8a8c-0ca3da0d2ac3"). InnerVolumeSpecName "kube-api-access-nbqfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.311504 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-scripts" (OuterVolumeSpecName: "scripts") pod "605042d5-6e07-4037-8a8c-0ca3da0d2ac3" (UID: "605042d5-6e07-4037-8a8c-0ca3da0d2ac3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.311799 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "605042d5-6e07-4037-8a8c-0ca3da0d2ac3" (UID: "605042d5-6e07-4037-8a8c-0ca3da0d2ac3"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.311839 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-kube-api-access-2rkqz" (OuterVolumeSpecName: "kube-api-access-2rkqz") pod "5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93" (UID: "5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93"). InnerVolumeSpecName "kube-api-access-2rkqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.325694 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-config-data" (OuterVolumeSpecName: "config-data") pod "605042d5-6e07-4037-8a8c-0ca3da0d2ac3" (UID: "605042d5-6e07-4037-8a8c-0ca3da0d2ac3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.379822 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="swift-kuttl-tests/rabbitmq-server-0" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.407899 4721 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.407936 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.407981 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rkqz\" (UniqueName: \"kubernetes.io/projected/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-kube-api-access-2rkqz\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.408000 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbqfs\" (UniqueName: \"kubernetes.io/projected/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-kube-api-access-nbqfs\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.408015 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.408028 4721 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.408043 4721 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.408057 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/605042d5-6e07-4037-8a8c-0ca3da0d2ac3-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.459136 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-db-create-84xxm"] Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.470578 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbican-db-create-84xxm"] Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.479147 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican958c-account-delete-77bvr"] Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.485958 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/barbican-958c-account-create-update-hm5b5"] Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.491294 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbican958c-account-delete-77bvr"] Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.495507 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/barbican-958c-account-create-update-hm5b5"] Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.508911 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/89920af2-3af7-4428-80fa-ddf2ba97dfb9-pod-info\") pod \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\" (UID: 
\"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.508990 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-erlang-cookie\") pod \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.509021 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bp2dh\" (UniqueName: \"kubernetes.io/projected/89920af2-3af7-4428-80fa-ddf2ba97dfb9-kube-api-access-bp2dh\") pod \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.509042 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-confd\") pod \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.509071 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-plugins\") pod \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.509205 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9\") pod \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.509265 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/89920af2-3af7-4428-80fa-ddf2ba97dfb9-plugins-conf\") pod \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.509297 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/89920af2-3af7-4428-80fa-ddf2ba97dfb9-erlang-cookie-secret\") pod \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\" (UID: \"89920af2-3af7-4428-80fa-ddf2ba97dfb9\") " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.509651 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "89920af2-3af7-4428-80fa-ddf2ba97dfb9" (UID: "89920af2-3af7-4428-80fa-ddf2ba97dfb9"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.509667 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "89920af2-3af7-4428-80fa-ddf2ba97dfb9" (UID: "89920af2-3af7-4428-80fa-ddf2ba97dfb9"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.510027 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89920af2-3af7-4428-80fa-ddf2ba97dfb9-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "89920af2-3af7-4428-80fa-ddf2ba97dfb9" (UID: "89920af2-3af7-4428-80fa-ddf2ba97dfb9"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.513352 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/89920af2-3af7-4428-80fa-ddf2ba97dfb9-pod-info" (OuterVolumeSpecName: "pod-info") pod "89920af2-3af7-4428-80fa-ddf2ba97dfb9" (UID: "89920af2-3af7-4428-80fa-ddf2ba97dfb9"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.515084 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89920af2-3af7-4428-80fa-ddf2ba97dfb9-kube-api-access-bp2dh" (OuterVolumeSpecName: "kube-api-access-bp2dh") pod "89920af2-3af7-4428-80fa-ddf2ba97dfb9" (UID: "89920af2-3af7-4428-80fa-ddf2ba97dfb9"). InnerVolumeSpecName "kube-api-access-bp2dh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.515128 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89920af2-3af7-4428-80fa-ddf2ba97dfb9-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "89920af2-3af7-4428-80fa-ddf2ba97dfb9" (UID: "89920af2-3af7-4428-80fa-ddf2ba97dfb9"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.519793 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9" (OuterVolumeSpecName: "persistence") pod "89920af2-3af7-4428-80fa-ddf2ba97dfb9" (UID: "89920af2-3af7-4428-80fa-ddf2ba97dfb9"). InnerVolumeSpecName "pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.576976 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "89920af2-3af7-4428-80fa-ddf2ba97dfb9" (UID: "89920af2-3af7-4428-80fa-ddf2ba97dfb9"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.611088 4721 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.611151 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9\") on node \"crc\" " Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.611169 4721 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/89920af2-3af7-4428-80fa-ddf2ba97dfb9-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.611184 4721 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/89920af2-3af7-4428-80fa-ddf2ba97dfb9-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.611195 4721 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/89920af2-3af7-4428-80fa-ddf2ba97dfb9-pod-info\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.611205 4721 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.611219 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bp2dh\" (UniqueName: \"kubernetes.io/projected/89920af2-3af7-4428-80fa-ddf2ba97dfb9-kube-api-access-bp2dh\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.611230 4721 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/89920af2-3af7-4428-80fa-ddf2ba97dfb9-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.625589 4721 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.625724 4721 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9") on node "crc"
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.712106 4721 reconciler_common.go:293] "Volume detached for volume \"pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fb30f42f-5218-4cc7-bbdd-7032754704a9\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.890695 4721 generic.go:334] "Generic (PLEG): container finished" podID="5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93" containerID="fdf853adc3af7a138953946a1a769d8a059b52b64d216157b4d6da2c575ce21a" exitCode=0
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.890756 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/memcached-0" event={"ID":"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93","Type":"ContainerDied","Data":"fdf853adc3af7a138953946a1a769d8a059b52b64d216157b4d6da2c575ce21a"}
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.890782 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/memcached-0" event={"ID":"5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93","Type":"ContainerDied","Data":"f9074cdc2b46e8b6f2aed9c0a79c0a72e71e8994593e03ea3c51431f56e2d7b9"}
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.890812 4721 scope.go:117] "RemoveContainer" containerID="fdf853adc3af7a138953946a1a769d8a059b52b64d216157b4d6da2c575ce21a"
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.890773 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/memcached-0"
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.892337 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone-78b88b8749-htdrf"
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.893049 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone-78b88b8749-htdrf" event={"ID":"605042d5-6e07-4037-8a8c-0ca3da0d2ac3","Type":"ContainerDied","Data":"865e06bbc0fec8475d3e53dde884477cd9e7ec52ba7274a7f457f633d2602b7c"}
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.900005 4721 generic.go:334] "Generic (PLEG): container finished" podID="89920af2-3af7-4428-80fa-ddf2ba97dfb9" containerID="3cc5505dcaf4c8eed98a80d0bcc290be4d06c7a19329d3cb28c29862fb1f1188" exitCode=0
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.900046 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"89920af2-3af7-4428-80fa-ddf2ba97dfb9","Type":"ContainerDied","Data":"3cc5505dcaf4c8eed98a80d0bcc290be4d06c7a19329d3cb28c29862fb1f1188"}
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.900072 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/rabbitmq-server-0" event={"ID":"89920af2-3af7-4428-80fa-ddf2ba97dfb9","Type":"ContainerDied","Data":"5177285996d31ce58bb9dca4ed33ba9f6b0f3769f466c38c9494b3d5b95e6a0e"}
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.900084 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/rabbitmq-server-0"
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.911371 4721 scope.go:117] "RemoveContainer" containerID="fdf853adc3af7a138953946a1a769d8a059b52b64d216157b4d6da2c575ce21a"
Dec 02 12:16:19 crc kubenswrapper[4721]: E1202 12:16:19.913428 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdf853adc3af7a138953946a1a769d8a059b52b64d216157b4d6da2c575ce21a\": container with ID starting with fdf853adc3af7a138953946a1a769d8a059b52b64d216157b4d6da2c575ce21a not found: ID does not exist" containerID="fdf853adc3af7a138953946a1a769d8a059b52b64d216157b4d6da2c575ce21a"
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.913486 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdf853adc3af7a138953946a1a769d8a059b52b64d216157b4d6da2c575ce21a"} err="failed to get container status \"fdf853adc3af7a138953946a1a769d8a059b52b64d216157b4d6da2c575ce21a\": rpc error: code = NotFound desc = could not find container \"fdf853adc3af7a138953946a1a769d8a059b52b64d216157b4d6da2c575ce21a\": container with ID starting with fdf853adc3af7a138953946a1a769d8a059b52b64d216157b4d6da2c575ce21a not found: ID does not exist"
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.913519 4721 scope.go:117] "RemoveContainer" containerID="e919556c6b839b7be54606fc02688391443d37428cca6d7e107bca50c848a403"
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.937691 4721 scope.go:117] "RemoveContainer" containerID="3cc5505dcaf4c8eed98a80d0bcc290be4d06c7a19329d3cb28c29862fb1f1188"
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.941159 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-78b88b8749-htdrf"]
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.958737 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone-78b88b8749-htdrf"]
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.963833 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/memcached-0"]
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.968782 4721 scope.go:117] "RemoveContainer" containerID="c07b2c4f609e7871d95b88adeef4fe6df645e60283b4626904994275357d4003"
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.970897 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/memcached-0"]
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.977128 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"]
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.981551 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/rabbitmq-server-0"]
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.982408 4721 scope.go:117] "RemoveContainer" containerID="3cc5505dcaf4c8eed98a80d0bcc290be4d06c7a19329d3cb28c29862fb1f1188"
Dec 02 12:16:19 crc kubenswrapper[4721]: E1202 12:16:19.982720 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cc5505dcaf4c8eed98a80d0bcc290be4d06c7a19329d3cb28c29862fb1f1188\": container with ID starting with 3cc5505dcaf4c8eed98a80d0bcc290be4d06c7a19329d3cb28c29862fb1f1188 not found: ID does not exist" containerID="3cc5505dcaf4c8eed98a80d0bcc290be4d06c7a19329d3cb28c29862fb1f1188"
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.982745 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cc5505dcaf4c8eed98a80d0bcc290be4d06c7a19329d3cb28c29862fb1f1188"} err="failed to get container status \"3cc5505dcaf4c8eed98a80d0bcc290be4d06c7a19329d3cb28c29862fb1f1188\": rpc error: code = NotFound desc = could not find container \"3cc5505dcaf4c8eed98a80d0bcc290be4d06c7a19329d3cb28c29862fb1f1188\": container with ID starting with 3cc5505dcaf4c8eed98a80d0bcc290be4d06c7a19329d3cb28c29862fb1f1188 not found: ID does not exist"
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.982766 4721 scope.go:117] "RemoveContainer" containerID="c07b2c4f609e7871d95b88adeef4fe6df645e60283b4626904994275357d4003"
Dec 02 12:16:19 crc kubenswrapper[4721]: E1202 12:16:19.982982 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c07b2c4f609e7871d95b88adeef4fe6df645e60283b4626904994275357d4003\": container with ID starting with c07b2c4f609e7871d95b88adeef4fe6df645e60283b4626904994275357d4003 not found: ID does not exist" containerID="c07b2c4f609e7871d95b88adeef4fe6df645e60283b4626904994275357d4003"
Dec 02 12:16:19 crc kubenswrapper[4721]: I1202 12:16:19.983003 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c07b2c4f609e7871d95b88adeef4fe6df645e60283b4626904994275357d4003"} err="failed to get container status \"c07b2c4f609e7871d95b88adeef4fe6df645e60283b4626904994275357d4003\": rpc error: code = NotFound desc = could not find container \"c07b2c4f609e7871d95b88adeef4fe6df645e60283b4626904994275357d4003\": container with ID starting with c07b2c4f609e7871d95b88adeef4fe6df645e60283b4626904994275357d4003 not found: ID does not exist"
Dec 02 12:16:20 crc kubenswrapper[4721]: E1202 12:16:20.425303 4721 configmap.go:193] Couldn't get configMap swift-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found
Dec 02 12:16:20 crc kubenswrapper[4721]: E1202 12:16:20.425705 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts podName:7349d6cc-58af-4d67-a865-31d8409c29cd nodeName:}" failed. No retries permitted until 2025-12-02 12:16:24.425684782 +0000 UTC m=+1514.164235131 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts") pod "keystone1088-account-delete-vlzc9" (UID: "7349d6cc-58af-4d67-a865-31d8409c29cd") : configmap "openstack-scripts" not found
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.552525 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-1"
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.627249 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-kolla-config\") pod \"91d5a64b-5c90-4fba-9e6c-416071e412b3\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") "
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.627300 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"91d5a64b-5c90-4fba-9e6c-416071e412b3\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") "
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.627422 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/91d5a64b-5c90-4fba-9e6c-416071e412b3-config-data-generated\") pod \"91d5a64b-5c90-4fba-9e6c-416071e412b3\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") "
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.627840 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91d5a64b-5c90-4fba-9e6c-416071e412b3-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "91d5a64b-5c90-4fba-9e6c-416071e412b3" (UID: "91d5a64b-5c90-4fba-9e6c-416071e412b3"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.627888 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dq4ng\" (UniqueName: \"kubernetes.io/projected/91d5a64b-5c90-4fba-9e6c-416071e412b3-kube-api-access-dq4ng\") pod \"91d5a64b-5c90-4fba-9e6c-416071e412b3\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") "
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.627975 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-config-data-default\") pod \"91d5a64b-5c90-4fba-9e6c-416071e412b3\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") "
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.627997 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-operator-scripts\") pod \"91d5a64b-5c90-4fba-9e6c-416071e412b3\" (UID: \"91d5a64b-5c90-4fba-9e6c-416071e412b3\") "
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.628450 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "91d5a64b-5c90-4fba-9e6c-416071e412b3" (UID: "91d5a64b-5c90-4fba-9e6c-416071e412b3"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.628588 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "91d5a64b-5c90-4fba-9e6c-416071e412b3" (UID: "91d5a64b-5c90-4fba-9e6c-416071e412b3"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.628861 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/91d5a64b-5c90-4fba-9e6c-416071e412b3-config-data-generated\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.628883 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-config-data-default\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.628896 4721 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-kolla-config\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.629014 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "91d5a64b-5c90-4fba-9e6c-416071e412b3" (UID: "91d5a64b-5c90-4fba-9e6c-416071e412b3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.634478 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91d5a64b-5c90-4fba-9e6c-416071e412b3-kube-api-access-dq4ng" (OuterVolumeSpecName: "kube-api-access-dq4ng") pod "91d5a64b-5c90-4fba-9e6c-416071e412b3" (UID: "91d5a64b-5c90-4fba-9e6c-416071e412b3"). InnerVolumeSpecName "kube-api-access-dq4ng". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.648163 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "mysql-db") pod "91d5a64b-5c90-4fba-9e6c-416071e412b3" (UID: "91d5a64b-5c90-4fba-9e6c-416071e412b3"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.674539 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm"]
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.674781 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" podUID="cc05df77-4dd2-4d22-87e8-7a0e9ff5181c" containerName="manager" containerID="cri-o://35977b5d3981a8b0aa61a0e9c1cf728e40d4ded2985eecd9d6570d56959ee1b6" gracePeriod=10
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.699587 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-db-create-q9phr"]
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.709419 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone-db-create-q9phr"]
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.731923 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" "
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.731965 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dq4ng\" (UniqueName: \"kubernetes.io/projected/91d5a64b-5c90-4fba-9e6c-416071e412b3-kube-api-access-dq4ng\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.731980 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91d5a64b-5c90-4fba-9e6c-416071e412b3-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.735001 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone-1088-account-create-update-qxbc8"]
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.746773 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/openstack-galera-0" podUID="46db9721-f25f-4725-b8ea-8eda2b7ce1b3" containerName="galera" containerID="cri-o://299da88363d9f9d45361fd72fc2bb5bd9e4c749a6d935d47ed2253c4a893d2da" gracePeriod=26
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.749259 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/keystone1088-account-delete-vlzc9"]
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.749500 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9" podUID="7349d6cc-58af-4d67-a865-31d8409c29cd" containerName="mariadb-account-delete" containerID="cri-o://cbdd31746d6f20e86a32ba99fee61c3e9dadc5f76ccd0ff940a41d5055c5aa11" gracePeriod=30
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.752891 4721 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc"
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.757485 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone-1088-account-create-update-qxbc8"]
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.834133 4721 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.848015 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04a81817-b7ad-473c-8a11-a79c9e61bcb7" path="/var/lib/kubelet/pods/04a81817-b7ad-473c-8a11-a79c9e61bcb7/volumes"
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.848588 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0da604f8-2b6f-40a1-82ca-27842e0bb72e" path="/var/lib/kubelet/pods/0da604f8-2b6f-40a1-82ca-27842e0bb72e/volumes"
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.849031 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55ef2351-e56c-4a46-8350-1ed9f1dae8c7" path="/var/lib/kubelet/pods/55ef2351-e56c-4a46-8350-1ed9f1dae8c7/volumes"
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.849885 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93" path="/var/lib/kubelet/pods/5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93/volumes"
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.850318 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="605042d5-6e07-4037-8a8c-0ca3da0d2ac3" path="/var/lib/kubelet/pods/605042d5-6e07-4037-8a8c-0ca3da0d2ac3/volumes"
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.850777 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620" path="/var/lib/kubelet/pods/7640cbd4-a5b8-4bb1-9fa8-d2d3eaa74620/volumes"
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.851215 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="783d4f63-0dcf-433e-8485-ccc511a31589" path="/var/lib/kubelet/pods/783d4f63-0dcf-433e-8485-ccc511a31589/volumes"
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.852962 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89920af2-3af7-4428-80fa-ddf2ba97dfb9" path="/var/lib/kubelet/pods/89920af2-3af7-4428-80fa-ddf2ba97dfb9/volumes"
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.853529 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a721153e-8340-4695-8dfa-7c47d0f9ca66" path="/var/lib/kubelet/pods/a721153e-8340-4695-8dfa-7c47d0f9ca66/volumes"
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.921285 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-index-rrq54"]
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.923439 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/swift-operator-index-rrq54" podUID="94c0083a-0c9e-43fc-9ad3-46517e9b4adb" containerName="registry-server" containerID="cri-o://5bebcd28f5e6ce63bc3e97ea20dfc561b0d551275460afaaba1c334b1504622d" gracePeriod=30
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.938988 4721 generic.go:334] "Generic (PLEG): container finished" podID="91d5a64b-5c90-4fba-9e6c-416071e412b3" containerID="1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09" exitCode=0
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.939049 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"91d5a64b-5c90-4fba-9e6c-416071e412b3","Type":"ContainerDied","Data":"1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09"}
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.939074 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-1" event={"ID":"91d5a64b-5c90-4fba-9e6c-416071e412b3","Type":"ContainerDied","Data":"db9a63ebc6c9f9e6ab04641bc4366bd5b06d693206076a9d1b55882a8d148bac"}
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.939090 4721 scope.go:117] "RemoveContainer" containerID="1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09"
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.939178 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-1"
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.951831 4721 generic.go:334] "Generic (PLEG): container finished" podID="cc05df77-4dd2-4d22-87e8-7a0e9ff5181c" containerID="35977b5d3981a8b0aa61a0e9c1cf728e40d4ded2985eecd9d6570d56959ee1b6" exitCode=0
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.951925 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" event={"ID":"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c","Type":"ContainerDied","Data":"35977b5d3981a8b0aa61a0e9c1cf728e40d4ded2985eecd9d6570d56959ee1b6"}
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.961232 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb"]
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.971056 4721 scope.go:117] "RemoveContainer" containerID="40508839ff7c4ec3bab04ada05d57b9c0f9ff6f42340cbc4cbde8a7956279fb9"
Dec 02 12:16:20 crc kubenswrapper[4721]: I1202 12:16:20.976295 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/f76edb09382185fcd5193f5e43245d885ebdaa4d7a71a8a0c7e3302a284gmsb"]
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.023910 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/openstack-galera-1"]
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.024523 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/openstack-galera-1"]
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.029100 4721 scope.go:117] "RemoveContainer" containerID="1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09"
Dec 02 12:16:21 crc kubenswrapper[4721]: E1202 12:16:21.032179 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09\": container with ID starting with 1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09 not found: ID does not exist" containerID="1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09"
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.032221 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09"} err="failed to get container status \"1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09\": rpc error: code = NotFound desc = could not find container \"1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09\": container with ID starting with 1e594e97f790515c0f3aad49aef85f5150f7494e2dba574c1661a9c5ca3bdb09 not found: ID does not exist"
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.032247 4721 scope.go:117] "RemoveContainer" containerID="40508839ff7c4ec3bab04ada05d57b9c0f9ff6f42340cbc4cbde8a7956279fb9"
Dec 02 12:16:21 crc kubenswrapper[4721]: E1202 12:16:21.032644 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40508839ff7c4ec3bab04ada05d57b9c0f9ff6f42340cbc4cbde8a7956279fb9\": container with ID starting with 40508839ff7c4ec3bab04ada05d57b9c0f9ff6f42340cbc4cbde8a7956279fb9 not found: ID does not exist" containerID="40508839ff7c4ec3bab04ada05d57b9c0f9ff6f42340cbc4cbde8a7956279fb9"
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.032723 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40508839ff7c4ec3bab04ada05d57b9c0f9ff6f42340cbc4cbde8a7956279fb9"} err="failed to get container status \"40508839ff7c4ec3bab04ada05d57b9c0f9ff6f42340cbc4cbde8a7956279fb9\": rpc error: code = NotFound desc = could not find container \"40508839ff7c4ec3bab04ada05d57b9c0f9ff6f42340cbc4cbde8a7956279fb9\": container with ID starting with 40508839ff7c4ec3bab04ada05d57b9c0f9ff6f42340cbc4cbde8a7956279fb9 not found: ID does not exist"
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.123624 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm"
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.246928 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-apiservice-cert\") pod \"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c\" (UID: \"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c\") "
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.247014 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-webhook-cert\") pod \"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c\" (UID: \"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c\") "
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.247057 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x89w9\" (UniqueName: \"kubernetes.io/projected/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-kube-api-access-x89w9\") pod \"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c\" (UID: \"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c\") "
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.253287 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "cc05df77-4dd2-4d22-87e8-7a0e9ff5181c" (UID: "cc05df77-4dd2-4d22-87e8-7a0e9ff5181c"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.256938 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-kube-api-access-x89w9" (OuterVolumeSpecName: "kube-api-access-x89w9") pod "cc05df77-4dd2-4d22-87e8-7a0e9ff5181c" (UID: "cc05df77-4dd2-4d22-87e8-7a0e9ff5181c"). InnerVolumeSpecName "kube-api-access-x89w9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.261504 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "cc05df77-4dd2-4d22-87e8-7a0e9ff5181c" (UID: "cc05df77-4dd2-4d22-87e8-7a0e9ff5181c"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.338182 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-rrq54"
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.348570 4721 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-apiservice-cert\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.348610 4721 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-webhook-cert\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.348623 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x89w9\" (UniqueName: \"kubernetes.io/projected/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c-kube-api-access-x89w9\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.449161 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rh6cs\" (UniqueName: \"kubernetes.io/projected/94c0083a-0c9e-43fc-9ad3-46517e9b4adb-kube-api-access-rh6cs\") pod \"94c0083a-0c9e-43fc-9ad3-46517e9b4adb\" (UID: \"94c0083a-0c9e-43fc-9ad3-46517e9b4adb\") "
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.453030 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94c0083a-0c9e-43fc-9ad3-46517e9b4adb-kube-api-access-rh6cs" (OuterVolumeSpecName: "kube-api-access-rh6cs") pod "94c0083a-0c9e-43fc-9ad3-46517e9b4adb" (UID: "94c0083a-0c9e-43fc-9ad3-46517e9b4adb"). InnerVolumeSpecName "kube-api-access-rh6cs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.550907 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rh6cs\" (UniqueName: \"kubernetes.io/projected/94c0083a-0c9e-43fc-9ad3-46517e9b4adb-kube-api-access-rh6cs\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.778587 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-0"
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.854696 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbjst\" (UniqueName: \"kubernetes.io/projected/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-kube-api-access-fbjst\") pod \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") "
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.854745 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-operator-scripts\") pod \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") "
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.854807 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") "
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.854851 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-config-data-default\") pod \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") "
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.854879 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-kolla-config\") pod \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") "
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.854981 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-config-data-generated\") pod \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\" (UID: \"46db9721-f25f-4725-b8ea-8eda2b7ce1b3\") "
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.855669 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "46db9721-f25f-4725-b8ea-8eda2b7ce1b3" (UID: "46db9721-f25f-4725-b8ea-8eda2b7ce1b3"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.856079 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "46db9721-f25f-4725-b8ea-8eda2b7ce1b3" (UID: "46db9721-f25f-4725-b8ea-8eda2b7ce1b3"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.856091 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "46db9721-f25f-4725-b8ea-8eda2b7ce1b3" (UID: "46db9721-f25f-4725-b8ea-8eda2b7ce1b3"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.856222 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "46db9721-f25f-4725-b8ea-8eda2b7ce1b3" (UID: "46db9721-f25f-4725-b8ea-8eda2b7ce1b3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.858006 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-kube-api-access-fbjst" (OuterVolumeSpecName: "kube-api-access-fbjst") pod "46db9721-f25f-4725-b8ea-8eda2b7ce1b3" (UID: "46db9721-f25f-4725-b8ea-8eda2b7ce1b3"). InnerVolumeSpecName "kube-api-access-fbjst". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.866717 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "mysql-db") pod "46db9721-f25f-4725-b8ea-8eda2b7ce1b3" (UID: "46db9721-f25f-4725-b8ea-8eda2b7ce1b3"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.957235 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-config-data-generated\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.957365 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbjst\" (UniqueName: \"kubernetes.io/projected/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-kube-api-access-fbjst\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.957389 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.957418 4721 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" "
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.957432 4721 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-config-data-default\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.957442 4721 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/46db9721-f25f-4725-b8ea-8eda2b7ce1b3-kolla-config\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.963704 4721 generic.go:334] "Generic (PLEG): container finished" podID="46db9721-f25f-4725-b8ea-8eda2b7ce1b3" containerID="299da88363d9f9d45361fd72fc2bb5bd9e4c749a6d935d47ed2253c4a893d2da" exitCode=0
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.963779 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"46db9721-f25f-4725-b8ea-8eda2b7ce1b3","Type":"ContainerDied","Data":"299da88363d9f9d45361fd72fc2bb5bd9e4c749a6d935d47ed2253c4a893d2da"}
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.963783 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/openstack-galera-0"
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.963807 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/openstack-galera-0" event={"ID":"46db9721-f25f-4725-b8ea-8eda2b7ce1b3","Type":"ContainerDied","Data":"f055d4b2584ee31e4c4e6975535a2dbd011833a137aad79064486255acd75571"}
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.963827 4721 scope.go:117] "RemoveContainer" containerID="299da88363d9f9d45361fd72fc2bb5bd9e4c749a6d935d47ed2253c4a893d2da"
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.965380 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm" event={"ID":"cc05df77-4dd2-4d22-87e8-7a0e9ff5181c","Type":"ContainerDied","Data":"db096804f2a36442bddeef9f7f17e762d03ac4ef1347f9301fe07d3e70eecf39"}
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.965392 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm"
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.980524 4721 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc"
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.984731 4721 generic.go:334] "Generic (PLEG): container finished" podID="94c0083a-0c9e-43fc-9ad3-46517e9b4adb" containerID="5bebcd28f5e6ce63bc3e97ea20dfc561b0d551275460afaaba1c334b1504622d" exitCode=0
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.984771 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-rrq54" event={"ID":"94c0083a-0c9e-43fc-9ad3-46517e9b4adb","Type":"ContainerDied","Data":"5bebcd28f5e6ce63bc3e97ea20dfc561b0d551275460afaaba1c334b1504622d"}
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.984796 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-rrq54" event={"ID":"94c0083a-0c9e-43fc-9ad3-46517e9b4adb","Type":"ContainerDied","Data":"d93c57e85b1009a41e62863acb39fd4f4f1887e1629cbb56b8c66b2d96168cef"}
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.984881 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-rrq54"
Dec 02 12:16:21 crc kubenswrapper[4721]: I1202 12:16:21.990675 4721 scope.go:117] "RemoveContainer" containerID="db916a79f327d46a3d34c7694ea9f2b6b60fedcfe05c09e8fbdbfcea06065517"
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.007996 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["swift-kuttl-tests/openstack-galera-0"]
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.013541 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/openstack-galera-0"]
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.024251 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm"]
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.032873 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/swift-operator-controller-manager-8b6cbcf7c-qdvhm"]
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.035709 4721 scope.go:117] "RemoveContainer" containerID="299da88363d9f9d45361fd72fc2bb5bd9e4c749a6d935d47ed2253c4a893d2da"
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.037703 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-index-rrq54"]
Dec 02 12:16:22 crc kubenswrapper[4721]: E1202 12:16:22.037911 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"299da88363d9f9d45361fd72fc2bb5bd9e4c749a6d935d47ed2253c4a893d2da\": container with ID starting with 299da88363d9f9d45361fd72fc2bb5bd9e4c749a6d935d47ed2253c4a893d2da not found: ID does not exist" containerID="299da88363d9f9d45361fd72fc2bb5bd9e4c749a6d935d47ed2253c4a893d2da"
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.037949 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"299da88363d9f9d45361fd72fc2bb5bd9e4c749a6d935d47ed2253c4a893d2da"} err="failed to get container status \"299da88363d9f9d45361fd72fc2bb5bd9e4c749a6d935d47ed2253c4a893d2da\": rpc error: code = NotFound desc = could not find container \"299da88363d9f9d45361fd72fc2bb5bd9e4c749a6d935d47ed2253c4a893d2da\": container with ID starting with 299da88363d9f9d45361fd72fc2bb5bd9e4c749a6d935d47ed2253c4a893d2da not found: ID does not exist"
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.037992 4721 scope.go:117] "RemoveContainer" containerID="db916a79f327d46a3d34c7694ea9f2b6b60fedcfe05c09e8fbdbfcea06065517"
Dec 02 12:16:22 crc kubenswrapper[4721]: E1202 12:16:22.038445 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db916a79f327d46a3d34c7694ea9f2b6b60fedcfe05c09e8fbdbfcea06065517\": container with ID starting with db916a79f327d46a3d34c7694ea9f2b6b60fedcfe05c09e8fbdbfcea06065517 not found: ID does not exist" containerID="db916a79f327d46a3d34c7694ea9f2b6b60fedcfe05c09e8fbdbfcea06065517"
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.038489 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db916a79f327d46a3d34c7694ea9f2b6b60fedcfe05c09e8fbdbfcea06065517"} err="failed to get container status \"db916a79f327d46a3d34c7694ea9f2b6b60fedcfe05c09e8fbdbfcea06065517\": rpc error: code = NotFound desc = could not find container \"db916a79f327d46a3d34c7694ea9f2b6b60fedcfe05c09e8fbdbfcea06065517\": container with ID starting with db916a79f327d46a3d34c7694ea9f2b6b60fedcfe05c09e8fbdbfcea06065517 not found: ID does not exist"
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.038534 4721 scope.go:117] "RemoveContainer" containerID="35977b5d3981a8b0aa61a0e9c1cf728e40d4ded2985eecd9d6570d56959ee1b6"
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.041877 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/swift-operator-index-rrq54"]
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.058018 4721 scope.go:117] "RemoveContainer" containerID="5bebcd28f5e6ce63bc3e97ea20dfc561b0d551275460afaaba1c334b1504622d"
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.058699 4721 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.074166 4721 scope.go:117] "RemoveContainer" containerID="5bebcd28f5e6ce63bc3e97ea20dfc561b0d551275460afaaba1c334b1504622d"
Dec 02 12:16:22 crc kubenswrapper[4721]: E1202 12:16:22.074717 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bebcd28f5e6ce63bc3e97ea20dfc561b0d551275460afaaba1c334b1504622d\": container with ID starting with 5bebcd28f5e6ce63bc3e97ea20dfc561b0d551275460afaaba1c334b1504622d not found: ID does not exist" containerID="5bebcd28f5e6ce63bc3e97ea20dfc561b0d551275460afaaba1c334b1504622d"
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.074767 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bebcd28f5e6ce63bc3e97ea20dfc561b0d551275460afaaba1c334b1504622d"} err="failed to get container status \"5bebcd28f5e6ce63bc3e97ea20dfc561b0d551275460afaaba1c334b1504622d\": rpc error: code = NotFound desc = could not find container \"5bebcd28f5e6ce63bc3e97ea20dfc561b0d551275460afaaba1c334b1504622d\": container with ID starting with 5bebcd28f5e6ce63bc3e97ea20dfc561b0d551275460afaaba1c334b1504622d not found: ID does not exist"
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.848994 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46db9721-f25f-4725-b8ea-8eda2b7ce1b3" path="/var/lib/kubelet/pods/46db9721-f25f-4725-b8ea-8eda2b7ce1b3/volumes"
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.851549 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ef97fe9-ccd2-4131-80f8-8ef651bd503e" path="/var/lib/kubelet/pods/6ef97fe9-ccd2-4131-80f8-8ef651bd503e/volumes"
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.852281 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91d5a64b-5c90-4fba-9e6c-416071e412b3" path="/var/lib/kubelet/pods/91d5a64b-5c90-4fba-9e6c-416071e412b3/volumes"
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.853431 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94c0083a-0c9e-43fc-9ad3-46517e9b4adb" path="/var/lib/kubelet/pods/94c0083a-0c9e-43fc-9ad3-46517e9b4adb/volumes"
Dec 02 12:16:22 crc kubenswrapper[4721]: I1202 12:16:22.853919 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc05df77-4dd2-4d22-87e8-7a0e9ff5181c" path="/var/lib/kubelet/pods/cc05df77-4dd2-4d22-87e8-7a0e9ff5181c/volumes"
Dec 02 12:16:23 crc kubenswrapper[4721]: I1202 12:16:23.650132 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh"]
Dec 02 12:16:23 crc kubenswrapper[4721]: I1202 12:16:23.650390 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" podUID="f9214199-fedd-4dd1-8785-aab679b0178b" containerName="manager" containerID="cri-o://c97a160f62600a18331f406fb788af3f4416d91e53515e01cd8a2bd6f5f86ffc" gracePeriod=10
Dec 02 12:16:23 crc kubenswrapper[4721]: I1202 12:16:23.996725 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-index-tmvr4"]
Dec 02 12:16:23 crc kubenswrapper[4721]: I1202 12:16:23.997261 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/barbican-operator-index-tmvr4" podUID="89b339ea-d21f-4d65-bb93-926e793838ef" containerName="registry-server" containerID="cri-o://a1ec8b2f2103a8ef24ac54eb8c030ec2dcf7d68467f9ea7996722b3eeb6ab815" gracePeriod=30
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.033276 4721 generic.go:334] "Generic (PLEG): container finished" podID="f9214199-fedd-4dd1-8785-aab679b0178b" containerID="c97a160f62600a18331f406fb788af3f4416d91e53515e01cd8a2bd6f5f86ffc" exitCode=0
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.033321 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" event={"ID":"f9214199-fedd-4dd1-8785-aab679b0178b","Type":"ContainerDied","Data":"c97a160f62600a18331f406fb788af3f4416d91e53515e01cd8a2bd6f5f86ffc"}
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.033359 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh" event={"ID":"f9214199-fedd-4dd1-8785-aab679b0178b","Type":"ContainerDied","Data":"41f2c78ebb95dcb6d17375ab6bd2cee4dc3ee4e9fd263822b1a858c550671db0"}
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.033369 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41f2c78ebb95dcb6d17375ab6bd2cee4dc3ee4e9fd263822b1a858c550671db0"
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.046983 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf"]
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.053801 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/ee8a7e2925b38108919a6c4a37131c62bef9ed0b2003ea2b2120dc9476cjchf"]
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.092796 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh"
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.185581 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f9214199-fedd-4dd1-8785-aab679b0178b-apiservice-cert\") pod \"f9214199-fedd-4dd1-8785-aab679b0178b\" (UID: \"f9214199-fedd-4dd1-8785-aab679b0178b\") "
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.185618 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f9214199-fedd-4dd1-8785-aab679b0178b-webhook-cert\") pod \"f9214199-fedd-4dd1-8785-aab679b0178b\" (UID: \"f9214199-fedd-4dd1-8785-aab679b0178b\") "
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.185653 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmqfz\" (UniqueName: \"kubernetes.io/projected/f9214199-fedd-4dd1-8785-aab679b0178b-kube-api-access-qmqfz\") pod \"f9214199-fedd-4dd1-8785-aab679b0178b\" (UID: \"f9214199-fedd-4dd1-8785-aab679b0178b\") "
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.191240 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9214199-fedd-4dd1-8785-aab679b0178b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "f9214199-fedd-4dd1-8785-aab679b0178b" (UID: "f9214199-fedd-4dd1-8785-aab679b0178b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.191953 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9214199-fedd-4dd1-8785-aab679b0178b-kube-api-access-qmqfz" (OuterVolumeSpecName: "kube-api-access-qmqfz") pod "f9214199-fedd-4dd1-8785-aab679b0178b" (UID: "f9214199-fedd-4dd1-8785-aab679b0178b"). InnerVolumeSpecName "kube-api-access-qmqfz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.208387 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9214199-fedd-4dd1-8785-aab679b0178b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "f9214199-fedd-4dd1-8785-aab679b0178b" (UID: "f9214199-fedd-4dd1-8785-aab679b0178b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.287083 4721 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f9214199-fedd-4dd1-8785-aab679b0178b-apiservice-cert\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.287114 4721 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f9214199-fedd-4dd1-8785-aab679b0178b-webhook-cert\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.287124 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmqfz\" (UniqueName: \"kubernetes.io/projected/f9214199-fedd-4dd1-8785-aab679b0178b-kube-api-access-qmqfz\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.366295 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-index-tmvr4"
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.489379 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ckxw\" (UniqueName: \"kubernetes.io/projected/89b339ea-d21f-4d65-bb93-926e793838ef-kube-api-access-4ckxw\") pod \"89b339ea-d21f-4d65-bb93-926e793838ef\" (UID: \"89b339ea-d21f-4d65-bb93-926e793838ef\") "
Dec 02 12:16:24 crc kubenswrapper[4721]: E1202 12:16:24.489784 4721 configmap.go:193] Couldn't get configMap swift-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found
Dec 02 12:16:24 crc kubenswrapper[4721]: E1202 12:16:24.489844 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts podName:7349d6cc-58af-4d67-a865-31d8409c29cd nodeName:}" failed. No retries permitted until 2025-12-02 12:16:32.489828591 +0000 UTC m=+1522.228378940 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts") pod "keystone1088-account-delete-vlzc9" (UID: "7349d6cc-58af-4d67-a865-31d8409c29cd") : configmap "openstack-scripts" not found
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.492199 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89b339ea-d21f-4d65-bb93-926e793838ef-kube-api-access-4ckxw" (OuterVolumeSpecName: "kube-api-access-4ckxw") pod "89b339ea-d21f-4d65-bb93-926e793838ef" (UID: "89b339ea-d21f-4d65-bb93-926e793838ef"). InnerVolumeSpecName "kube-api-access-4ckxw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.590971 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ckxw\" (UniqueName: \"kubernetes.io/projected/89b339ea-d21f-4d65-bb93-926e793838ef-kube-api-access-4ckxw\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:24 crc kubenswrapper[4721]: I1202 12:16:24.848785 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="317859f7-dacf-4980-ac45-8d39758ac1cd" path="/var/lib/kubelet/pods/317859f7-dacf-4980-ac45-8d39758ac1cd/volumes"
Dec 02 12:16:25 crc kubenswrapper[4721]: I1202 12:16:25.040791 4721 generic.go:334] "Generic (PLEG): container finished" podID="89b339ea-d21f-4d65-bb93-926e793838ef" containerID="a1ec8b2f2103a8ef24ac54eb8c030ec2dcf7d68467f9ea7996722b3eeb6ab815" exitCode=0
Dec 02 12:16:25 crc kubenswrapper[4721]: I1202 12:16:25.041111 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh"
Dec 02 12:16:25 crc kubenswrapper[4721]: I1202 12:16:25.041658 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-index-tmvr4"
Dec 02 12:16:25 crc kubenswrapper[4721]: I1202 12:16:25.041891 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-tmvr4" event={"ID":"89b339ea-d21f-4d65-bb93-926e793838ef","Type":"ContainerDied","Data":"a1ec8b2f2103a8ef24ac54eb8c030ec2dcf7d68467f9ea7996722b3eeb6ab815"}
Dec 02 12:16:25 crc kubenswrapper[4721]: I1202 12:16:25.041935 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-tmvr4" event={"ID":"89b339ea-d21f-4d65-bb93-926e793838ef","Type":"ContainerDied","Data":"f30c576a3682a82f8dbfaf4d4b6436aa2613cba8247ce8bd73cf53f37fc14c98"}
Dec 02 12:16:25 crc kubenswrapper[4721]: I1202 12:16:25.041955 4721 scope.go:117] "RemoveContainer" containerID="a1ec8b2f2103a8ef24ac54eb8c030ec2dcf7d68467f9ea7996722b3eeb6ab815"
Dec 02 12:16:25 crc kubenswrapper[4721]: I1202 12:16:25.060704 4721 scope.go:117] "RemoveContainer" containerID="a1ec8b2f2103a8ef24ac54eb8c030ec2dcf7d68467f9ea7996722b3eeb6ab815"
Dec 02 12:16:25 crc kubenswrapper[4721]: E1202 12:16:25.064976 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1ec8b2f2103a8ef24ac54eb8c030ec2dcf7d68467f9ea7996722b3eeb6ab815\": container with ID starting with a1ec8b2f2103a8ef24ac54eb8c030ec2dcf7d68467f9ea7996722b3eeb6ab815 not found: ID does not exist" containerID="a1ec8b2f2103a8ef24ac54eb8c030ec2dcf7d68467f9ea7996722b3eeb6ab815"
Dec 02 12:16:25 crc kubenswrapper[4721]: I1202 12:16:25.065011 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1ec8b2f2103a8ef24ac54eb8c030ec2dcf7d68467f9ea7996722b3eeb6ab815"} err="failed to get container status \"a1ec8b2f2103a8ef24ac54eb8c030ec2dcf7d68467f9ea7996722b3eeb6ab815\": rpc error: code = NotFound desc = could not find container \"a1ec8b2f2103a8ef24ac54eb8c030ec2dcf7d68467f9ea7996722b3eeb6ab815\": container with ID starting with a1ec8b2f2103a8ef24ac54eb8c030ec2dcf7d68467f9ea7996722b3eeb6ab815 not found: ID does not exist"
Dec 02 12:16:25 crc kubenswrapper[4721]: I1202 12:16:25.066139 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh"]
Dec 02 12:16:25 crc kubenswrapper[4721]: I1202 12:16:25.070898 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7cf47c6f5c-rw8nh"]
Dec 02 12:16:25 crc kubenswrapper[4721]: I1202 12:16:25.075863 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-index-tmvr4"]
Dec 02 12:16:25 crc kubenswrapper[4721]: I1202 12:16:25.098869 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/barbican-operator-index-tmvr4"]
Dec 02 12:16:26 crc kubenswrapper[4721]: I1202 12:16:26.847814 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89b339ea-d21f-4d65-bb93-926e793838ef" path="/var/lib/kubelet/pods/89b339ea-d21f-4d65-bb93-926e793838ef/volumes"
Dec 02 12:16:26 crc kubenswrapper[4721]: I1202 12:16:26.848631 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9214199-fedd-4dd1-8785-aab679b0178b" path="/var/lib/kubelet/pods/f9214199-fedd-4dd1-8785-aab679b0178b/volumes"
Dec 02 12:16:28 crc kubenswrapper[4721]: I1202 12:16:28.410517 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp"]
Dec 02 12:16:28 crc kubenswrapper[4721]: I1202 12:16:28.410761 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" podUID="b79c462f-8fbc-41aa-9f98-c00c17ff9705" containerName="manager" containerID="cri-o://8d2a18f36234e4505df388a3d89d4d8b09ccfbcca43b497db7586e886a614f2b" gracePeriod=10
Dec 02 12:16:28 crc kubenswrapper[4721]: I1202 12:16:28.689154 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-4f9gs"]
Dec 02 12:16:28 crc kubenswrapper[4721]: I1202 12:16:28.689411 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-index-4f9gs" podUID="b7499522-49b1-4362-a257-50c2d2bf3bdb" containerName="registry-server" containerID="cri-o://90b864ac038bfb49dd40394b0f414f074eae493a77e3c2bcb1c4c7072cd1e089" gracePeriod=30
Dec 02 12:16:28 crc kubenswrapper[4721]: I1202 12:16:28.716632 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d"]
Dec 02 12:16:28 crc kubenswrapper[4721]: I1202 12:16:28.721216 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/49c083020ae5dfe237b73a6c6b807501660a323f061d7879268c43a1214xt7d"]
Dec 02 12:16:28 crc kubenswrapper[4721]: I1202 12:16:28.848406 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f708318e-7bf1-4974-90cb-1b800029af79" path="/var/lib/kubelet/pods/f708318e-7bf1-4974-90cb-1b800029af79/volumes"
Dec 02 12:16:28 crc kubenswrapper[4721]: I1202 12:16:28.852808 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp"
Dec 02 12:16:28 crc kubenswrapper[4721]: I1202 12:16:28.967758 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b79c462f-8fbc-41aa-9f98-c00c17ff9705-webhook-cert\") pod \"b79c462f-8fbc-41aa-9f98-c00c17ff9705\" (UID: \"b79c462f-8fbc-41aa-9f98-c00c17ff9705\") "
Dec 02 12:16:28 crc kubenswrapper[4721]: I1202 12:16:28.967798 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b79c462f-8fbc-41aa-9f98-c00c17ff9705-apiservice-cert\") pod \"b79c462f-8fbc-41aa-9f98-c00c17ff9705\" (UID: \"b79c462f-8fbc-41aa-9f98-c00c17ff9705\") "
Dec 02 12:16:28 crc kubenswrapper[4721]: I1202 12:16:28.967860 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vvqk\" (UniqueName: \"kubernetes.io/projected/b79c462f-8fbc-41aa-9f98-c00c17ff9705-kube-api-access-7vvqk\") pod \"b79c462f-8fbc-41aa-9f98-c00c17ff9705\" (UID: \"b79c462f-8fbc-41aa-9f98-c00c17ff9705\") "
Dec 02 12:16:28 crc kubenswrapper[4721]: I1202 12:16:28.990862 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b79c462f-8fbc-41aa-9f98-c00c17ff9705-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "b79c462f-8fbc-41aa-9f98-c00c17ff9705" (UID: "b79c462f-8fbc-41aa-9f98-c00c17ff9705"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:16:28 crc kubenswrapper[4721]: I1202 12:16:28.990989 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b79c462f-8fbc-41aa-9f98-c00c17ff9705-kube-api-access-7vvqk" (OuterVolumeSpecName: "kube-api-access-7vvqk") pod "b79c462f-8fbc-41aa-9f98-c00c17ff9705" (UID: "b79c462f-8fbc-41aa-9f98-c00c17ff9705"). InnerVolumeSpecName "kube-api-access-7vvqk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:16:28 crc kubenswrapper[4721]: I1202 12:16:28.991952 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b79c462f-8fbc-41aa-9f98-c00c17ff9705-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "b79c462f-8fbc-41aa-9f98-c00c17ff9705" (UID: "b79c462f-8fbc-41aa-9f98-c00c17ff9705"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.039243 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-4f9gs"
Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.067922 4721 generic.go:334] "Generic (PLEG): container finished" podID="b79c462f-8fbc-41aa-9f98-c00c17ff9705" containerID="8d2a18f36234e4505df388a3d89d4d8b09ccfbcca43b497db7586e886a614f2b" exitCode=0
Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.067977 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" event={"ID":"b79c462f-8fbc-41aa-9f98-c00c17ff9705","Type":"ContainerDied","Data":"8d2a18f36234e4505df388a3d89d4d8b09ccfbcca43b497db7586e886a614f2b"}
Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.068002 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp" event={"ID":"b79c462f-8fbc-41aa-9f98-c00c17ff9705","Type":"ContainerDied","Data":"2c0c77ef3c312ab246dedc520d3148e106fe65d870a44dd516f6f76ed01e6bfb"}
Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.068017 4721 scope.go:117] "RemoveContainer" containerID="8d2a18f36234e4505df388a3d89d4d8b09ccfbcca43b497db7586e886a614f2b"
Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.068107 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp"
Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.069128 4721 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b79c462f-8fbc-41aa-9f98-c00c17ff9705-webhook-cert\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.069150 4721 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b79c462f-8fbc-41aa-9f98-c00c17ff9705-apiservice-cert\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.069172 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vvqk\" (UniqueName: \"kubernetes.io/projected/b79c462f-8fbc-41aa-9f98-c00c17ff9705-kube-api-access-7vvqk\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.073287 4721 generic.go:334] "Generic (PLEG): container finished" podID="b7499522-49b1-4362-a257-50c2d2bf3bdb" containerID="90b864ac038bfb49dd40394b0f414f074eae493a77e3c2bcb1c4c7072cd1e089" exitCode=0
Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.073325 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-4f9gs" event={"ID":"b7499522-49b1-4362-a257-50c2d2bf3bdb","Type":"ContainerDied","Data":"90b864ac038bfb49dd40394b0f414f074eae493a77e3c2bcb1c4c7072cd1e089"}
Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.073366 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-4f9gs" event={"ID":"b7499522-49b1-4362-a257-50c2d2bf3bdb","Type":"ContainerDied","Data":"222b8bd358c288191b4c22f099e95fb6f6178638715433f93ee452263753b16d"}
Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.073425 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-4f9gs" Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.094831 4721 scope.go:117] "RemoveContainer" containerID="8d2a18f36234e4505df388a3d89d4d8b09ccfbcca43b497db7586e886a614f2b" Dec 02 12:16:29 crc kubenswrapper[4721]: E1202 12:16:29.095919 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d2a18f36234e4505df388a3d89d4d8b09ccfbcca43b497db7586e886a614f2b\": container with ID starting with 8d2a18f36234e4505df388a3d89d4d8b09ccfbcca43b497db7586e886a614f2b not found: ID does not exist" containerID="8d2a18f36234e4505df388a3d89d4d8b09ccfbcca43b497db7586e886a614f2b" Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.095956 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d2a18f36234e4505df388a3d89d4d8b09ccfbcca43b497db7586e886a614f2b"} err="failed to get container status \"8d2a18f36234e4505df388a3d89d4d8b09ccfbcca43b497db7586e886a614f2b\": rpc error: code = NotFound desc = could not find container \"8d2a18f36234e4505df388a3d89d4d8b09ccfbcca43b497db7586e886a614f2b\": container with ID starting with 8d2a18f36234e4505df388a3d89d4d8b09ccfbcca43b497db7586e886a614f2b not found: ID does not exist" Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.095982 4721 scope.go:117] "RemoveContainer" containerID="90b864ac038bfb49dd40394b0f414f074eae493a77e3c2bcb1c4c7072cd1e089" Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.102447 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp"] Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.107558 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-77bbd48959-gdffp"] Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.115028 4721 scope.go:117] "RemoveContainer" containerID="90b864ac038bfb49dd40394b0f414f074eae493a77e3c2bcb1c4c7072cd1e089" Dec 02 12:16:29 crc kubenswrapper[4721]: E1202 12:16:29.115617 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90b864ac038bfb49dd40394b0f414f074eae493a77e3c2bcb1c4c7072cd1e089\": container with ID starting with 90b864ac038bfb49dd40394b0f414f074eae493a77e3c2bcb1c4c7072cd1e089 not found: ID does not exist" containerID="90b864ac038bfb49dd40394b0f414f074eae493a77e3c2bcb1c4c7072cd1e089" Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.115658 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90b864ac038bfb49dd40394b0f414f074eae493a77e3c2bcb1c4c7072cd1e089"} err="failed to get container status \"90b864ac038bfb49dd40394b0f414f074eae493a77e3c2bcb1c4c7072cd1e089\": rpc error: code = NotFound desc = could not find container \"90b864ac038bfb49dd40394b0f414f074eae493a77e3c2bcb1c4c7072cd1e089\": container with ID starting with 90b864ac038bfb49dd40394b0f414f074eae493a77e3c2bcb1c4c7072cd1e089 not found: ID does not exist" Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.170022 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pd8s\" (UniqueName: \"kubernetes.io/projected/b7499522-49b1-4362-a257-50c2d2bf3bdb-kube-api-access-4pd8s\") pod \"b7499522-49b1-4362-a257-50c2d2bf3bdb\" (UID: \"b7499522-49b1-4362-a257-50c2d2bf3bdb\") " Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 
12:16:29.174171 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7499522-49b1-4362-a257-50c2d2bf3bdb-kube-api-access-4pd8s" (OuterVolumeSpecName: "kube-api-access-4pd8s") pod "b7499522-49b1-4362-a257-50c2d2bf3bdb" (UID: "b7499522-49b1-4362-a257-50c2d2bf3bdb"). InnerVolumeSpecName "kube-api-access-4pd8s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.271881 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pd8s\" (UniqueName: \"kubernetes.io/projected/b7499522-49b1-4362-a257-50c2d2bf3bdb-kube-api-access-4pd8s\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.398559 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-4f9gs"] Dec 02 12:16:29 crc kubenswrapper[4721]: I1202 12:16:29.402667 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-index-4f9gs"] Dec 02 12:16:30 crc kubenswrapper[4721]: I1202 12:16:30.634998 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg"] Dec 02 12:16:30 crc kubenswrapper[4721]: I1202 12:16:30.635224 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg" podUID="446fee83-175f-44a1-b4a5-24f8150e2d43" containerName="operator" containerID="cri-o://2da6a71fca99c11864a01378be7ed54675690966ed484cd16e07dfb1655611ad" gracePeriod=10 Dec 02 12:16:30 crc kubenswrapper[4721]: I1202 12:16:30.846812 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7499522-49b1-4362-a257-50c2d2bf3bdb" path="/var/lib/kubelet/pods/b7499522-49b1-4362-a257-50c2d2bf3bdb/volumes" Dec 02 12:16:30 crc kubenswrapper[4721]: I1202 12:16:30.847785 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b79c462f-8fbc-41aa-9f98-c00c17ff9705" path="/var/lib/kubelet/pods/b79c462f-8fbc-41aa-9f98-c00c17ff9705/volumes" Dec 02 12:16:30 crc kubenswrapper[4721]: I1202 12:16:30.946833 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-fvc2h"] Dec 02 12:16:30 crc kubenswrapper[4721]: I1202 12:16:30.947059 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" podUID="7381a93e-ca9b-4d2b-ae20-c110ebab577a" containerName="registry-server" containerID="cri-o://3d1a4dff284a80185eeed769405d41326b0cc27bbb4d82ff51c16da5c57a9ef6" gracePeriod=30 Dec 02 12:16:30 crc kubenswrapper[4721]: I1202 12:16:30.978310 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll"] Dec 02 12:16:30 crc kubenswrapper[4721]: I1202 12:16:30.983101 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590f4xll"] Dec 02 12:16:31 crc kubenswrapper[4721]: I1202 12:16:31.096240 4721 generic.go:334] "Generic (PLEG): container finished" podID="446fee83-175f-44a1-b4a5-24f8150e2d43" containerID="2da6a71fca99c11864a01378be7ed54675690966ed484cd16e07dfb1655611ad" exitCode=0 Dec 02 12:16:31 crc kubenswrapper[4721]: I1202 12:16:31.096373 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg" 
event={"ID":"446fee83-175f-44a1-b4a5-24f8150e2d43","Type":"ContainerDied","Data":"2da6a71fca99c11864a01378be7ed54675690966ed484cd16e07dfb1655611ad"} Dec 02 12:16:31 crc kubenswrapper[4721]: I1202 12:16:31.101222 4721 generic.go:334] "Generic (PLEG): container finished" podID="7381a93e-ca9b-4d2b-ae20-c110ebab577a" containerID="3d1a4dff284a80185eeed769405d41326b0cc27bbb4d82ff51c16da5c57a9ef6" exitCode=0 Dec 02 12:16:31 crc kubenswrapper[4721]: I1202 12:16:31.101261 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" event={"ID":"7381a93e-ca9b-4d2b-ae20-c110ebab577a","Type":"ContainerDied","Data":"3d1a4dff284a80185eeed769405d41326b0cc27bbb4d82ff51c16da5c57a9ef6"} Dec 02 12:16:31 crc kubenswrapper[4721]: I1202 12:16:31.151202 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg" Dec 02 12:16:31 crc kubenswrapper[4721]: I1202 12:16:31.298877 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvzjx\" (UniqueName: \"kubernetes.io/projected/446fee83-175f-44a1-b4a5-24f8150e2d43-kube-api-access-zvzjx\") pod \"446fee83-175f-44a1-b4a5-24f8150e2d43\" (UID: \"446fee83-175f-44a1-b4a5-24f8150e2d43\") " Dec 02 12:16:31 crc kubenswrapper[4721]: I1202 12:16:31.303905 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/446fee83-175f-44a1-b4a5-24f8150e2d43-kube-api-access-zvzjx" (OuterVolumeSpecName: "kube-api-access-zvzjx") pod "446fee83-175f-44a1-b4a5-24f8150e2d43" (UID: "446fee83-175f-44a1-b4a5-24f8150e2d43"). InnerVolumeSpecName "kube-api-access-zvzjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:16:31 crc kubenswrapper[4721]: I1202 12:16:31.331607 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" Dec 02 12:16:31 crc kubenswrapper[4721]: I1202 12:16:31.400428 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvzjx\" (UniqueName: \"kubernetes.io/projected/446fee83-175f-44a1-b4a5-24f8150e2d43-kube-api-access-zvzjx\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:31 crc kubenswrapper[4721]: I1202 12:16:31.501469 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jttxk\" (UniqueName: \"kubernetes.io/projected/7381a93e-ca9b-4d2b-ae20-c110ebab577a-kube-api-access-jttxk\") pod \"7381a93e-ca9b-4d2b-ae20-c110ebab577a\" (UID: \"7381a93e-ca9b-4d2b-ae20-c110ebab577a\") " Dec 02 12:16:31 crc kubenswrapper[4721]: I1202 12:16:31.504524 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7381a93e-ca9b-4d2b-ae20-c110ebab577a-kube-api-access-jttxk" (OuterVolumeSpecName: "kube-api-access-jttxk") pod "7381a93e-ca9b-4d2b-ae20-c110ebab577a" (UID: "7381a93e-ca9b-4d2b-ae20-c110ebab577a"). InnerVolumeSpecName "kube-api-access-jttxk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:16:31 crc kubenswrapper[4721]: I1202 12:16:31.602411 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jttxk\" (UniqueName: \"kubernetes.io/projected/7381a93e-ca9b-4d2b-ae20-c110ebab577a-kube-api-access-jttxk\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:32 crc kubenswrapper[4721]: I1202 12:16:32.110253 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg" event={"ID":"446fee83-175f-44a1-b4a5-24f8150e2d43","Type":"ContainerDied","Data":"39a1488744b97037e55b4edf9fbbecdfb11e095d93151f152689657f0f18dc26"} Dec 02 12:16:32 crc kubenswrapper[4721]: I1202 12:16:32.110311 4721 scope.go:117] "RemoveContainer" containerID="2da6a71fca99c11864a01378be7ed54675690966ed484cd16e07dfb1655611ad" Dec 02 12:16:32 crc kubenswrapper[4721]: I1202 12:16:32.110265 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg" Dec 02 12:16:32 crc kubenswrapper[4721]: I1202 12:16:32.111758 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" event={"ID":"7381a93e-ca9b-4d2b-ae20-c110ebab577a","Type":"ContainerDied","Data":"d8d98afd4a885f92f307460653184f953a18d406f5aa18c508631d006a83aa6e"} Dec 02 12:16:32 crc kubenswrapper[4721]: I1202 12:16:32.111798 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-fvc2h" Dec 02 12:16:32 crc kubenswrapper[4721]: I1202 12:16:32.133822 4721 scope.go:117] "RemoveContainer" containerID="3d1a4dff284a80185eeed769405d41326b0cc27bbb4d82ff51c16da5c57a9ef6" Dec 02 12:16:32 crc kubenswrapper[4721]: I1202 12:16:32.147311 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-fvc2h"] Dec 02 12:16:32 crc kubenswrapper[4721]: I1202 12:16:32.155119 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-fvc2h"] Dec 02 12:16:32 crc kubenswrapper[4721]: I1202 12:16:32.159103 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg"] Dec 02 12:16:32 crc kubenswrapper[4721]: I1202 12:16:32.162384 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-wgmmg"] Dec 02 12:16:33 crc kubenswrapper[4721]: E1202 12:16:33.042266 4721 configmap.go:193] Couldn't get configMap swift-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Dec 02 12:16:33 crc kubenswrapper[4721]: E1202 12:16:33.042352 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts podName:7349d6cc-58af-4d67-a865-31d8409c29cd nodeName:}" failed. No retries permitted until 2025-12-02 12:16:49.042320664 +0000 UTC m=+1538.780871013 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts") pod "keystone1088-account-delete-vlzc9" (UID: "7349d6cc-58af-4d67-a865-31d8409c29cd") : configmap "openstack-scripts" not found Dec 02 12:16:33 crc kubenswrapper[4721]: I1202 12:16:33.054957 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="446fee83-175f-44a1-b4a5-24f8150e2d43" path="/var/lib/kubelet/pods/446fee83-175f-44a1-b4a5-24f8150e2d43/volumes" Dec 02 12:16:33 crc kubenswrapper[4721]: I1202 12:16:33.056061 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7381a93e-ca9b-4d2b-ae20-c110ebab577a" path="/var/lib/kubelet/pods/7381a93e-ca9b-4d2b-ae20-c110ebab577a/volumes" Dec 02 12:16:33 crc kubenswrapper[4721]: I1202 12:16:33.058405 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8097003-f4f3-4ca3-9b65-118f447c3d48" path="/var/lib/kubelet/pods/b8097003-f4f3-4ca3-9b65-118f447c3d48/volumes" Dec 02 12:16:34 crc kubenswrapper[4721]: I1202 12:16:34.435502 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99"] Dec 02 12:16:34 crc kubenswrapper[4721]: I1202 12:16:34.436036 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" podUID="6259f8fd-9c16-4c09-8e11-213914fe35f8" containerName="manager" containerID="cri-o://f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e" gracePeriod=10 Dec 02 12:16:34 crc kubenswrapper[4721]: I1202 12:16:34.436398 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" podUID="6259f8fd-9c16-4c09-8e11-213914fe35f8" containerName="kube-rbac-proxy" containerID="cri-o://31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737" gracePeriod=10 Dec 02 12:16:34 crc kubenswrapper[4721]: I1202 12:16:34.746407 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-hpzsg"] Dec 02 12:16:34 crc kubenswrapper[4721]: I1202 12:16:34.746869 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-hpzsg" podUID="bc87903c-e306-4c9c-a881-ed00eb454ae3" containerName="registry-server" containerID="cri-o://ef47091654862e7ed40d72295c813354132502e344efd1f309f27d5fd0132d0f" gracePeriod=30 Dec 02 12:16:34 crc kubenswrapper[4721]: I1202 12:16:34.777961 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9"] Dec 02 12:16:34 crc kubenswrapper[4721]: I1202 12:16:34.789002 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dl97z9"] Dec 02 12:16:34 crc kubenswrapper[4721]: I1202 12:16:34.848626 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40facf43-ee2e-42cd-b22a-ae0504c0be95" path="/var/lib/kubelet/pods/40facf43-ee2e-42cd-b22a-ae0504c0be95/volumes" Dec 02 12:16:34 crc kubenswrapper[4721]: I1202 12:16:34.857439 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" Dec 02 12:16:34 crc kubenswrapper[4721]: I1202 12:16:34.967648 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6259f8fd-9c16-4c09-8e11-213914fe35f8-apiservice-cert\") pod \"6259f8fd-9c16-4c09-8e11-213914fe35f8\" (UID: \"6259f8fd-9c16-4c09-8e11-213914fe35f8\") " Dec 02 12:16:34 crc kubenswrapper[4721]: I1202 12:16:34.967736 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6259f8fd-9c16-4c09-8e11-213914fe35f8-webhook-cert\") pod \"6259f8fd-9c16-4c09-8e11-213914fe35f8\" (UID: \"6259f8fd-9c16-4c09-8e11-213914fe35f8\") " Dec 02 12:16:34 crc kubenswrapper[4721]: I1202 12:16:34.967774 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5568x\" (UniqueName: \"kubernetes.io/projected/6259f8fd-9c16-4c09-8e11-213914fe35f8-kube-api-access-5568x\") pod \"6259f8fd-9c16-4c09-8e11-213914fe35f8\" (UID: \"6259f8fd-9c16-4c09-8e11-213914fe35f8\") " Dec 02 12:16:34 crc kubenswrapper[4721]: I1202 12:16:34.972797 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6259f8fd-9c16-4c09-8e11-213914fe35f8-kube-api-access-5568x" (OuterVolumeSpecName: "kube-api-access-5568x") pod "6259f8fd-9c16-4c09-8e11-213914fe35f8" (UID: "6259f8fd-9c16-4c09-8e11-213914fe35f8"). InnerVolumeSpecName "kube-api-access-5568x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:16:34 crc kubenswrapper[4721]: I1202 12:16:34.973480 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6259f8fd-9c16-4c09-8e11-213914fe35f8-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "6259f8fd-9c16-4c09-8e11-213914fe35f8" (UID: "6259f8fd-9c16-4c09-8e11-213914fe35f8"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:16:34 crc kubenswrapper[4721]: I1202 12:16:34.987542 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6259f8fd-9c16-4c09-8e11-213914fe35f8-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "6259f8fd-9c16-4c09-8e11-213914fe35f8" (UID: "6259f8fd-9c16-4c09-8e11-213914fe35f8"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.069155 4721 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6259f8fd-9c16-4c09-8e11-213914fe35f8-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.069198 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5568x\" (UniqueName: \"kubernetes.io/projected/6259f8fd-9c16-4c09-8e11-213914fe35f8-kube-api-access-5568x\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.069214 4721 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6259f8fd-9c16-4c09-8e11-213914fe35f8-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.084473 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-hpzsg" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.135127 4721 generic.go:334] "Generic (PLEG): container finished" podID="bc87903c-e306-4c9c-a881-ed00eb454ae3" containerID="ef47091654862e7ed40d72295c813354132502e344efd1f309f27d5fd0132d0f" exitCode=0 Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.135213 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-hpzsg" event={"ID":"bc87903c-e306-4c9c-a881-ed00eb454ae3","Type":"ContainerDied","Data":"ef47091654862e7ed40d72295c813354132502e344efd1f309f27d5fd0132d0f"} Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.135244 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-hpzsg" event={"ID":"bc87903c-e306-4c9c-a881-ed00eb454ae3","Type":"ContainerDied","Data":"9bfa86436c5ebe054226ba6710952f9730f15ae34d8bf7c9314f7ce3d9798995"} Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.135267 4721 scope.go:117] "RemoveContainer" containerID="ef47091654862e7ed40d72295c813354132502e344efd1f309f27d5fd0132d0f" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.135439 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-hpzsg" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.138634 4721 generic.go:334] "Generic (PLEG): container finished" podID="6259f8fd-9c16-4c09-8e11-213914fe35f8" containerID="31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737" exitCode=0 Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.138659 4721 generic.go:334] "Generic (PLEG): container finished" podID="6259f8fd-9c16-4c09-8e11-213914fe35f8" containerID="f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e" exitCode=0 Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.138677 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" event={"ID":"6259f8fd-9c16-4c09-8e11-213914fe35f8","Type":"ContainerDied","Data":"31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737"} Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.138698 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" event={"ID":"6259f8fd-9c16-4c09-8e11-213914fe35f8","Type":"ContainerDied","Data":"f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e"} Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.138711 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" event={"ID":"6259f8fd-9c16-4c09-8e11-213914fe35f8","Type":"ContainerDied","Data":"ad163e3db09099121ed267db61b9d29929431b9e758bdebdf7692e4b66354366"} Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.138761 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.158108 4721 scope.go:117] "RemoveContainer" containerID="ef47091654862e7ed40d72295c813354132502e344efd1f309f27d5fd0132d0f" Dec 02 12:16:35 crc kubenswrapper[4721]: E1202 12:16:35.158574 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef47091654862e7ed40d72295c813354132502e344efd1f309f27d5fd0132d0f\": container with ID starting with ef47091654862e7ed40d72295c813354132502e344efd1f309f27d5fd0132d0f not found: ID does not exist" containerID="ef47091654862e7ed40d72295c813354132502e344efd1f309f27d5fd0132d0f" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.158613 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef47091654862e7ed40d72295c813354132502e344efd1f309f27d5fd0132d0f"} err="failed to get container status \"ef47091654862e7ed40d72295c813354132502e344efd1f309f27d5fd0132d0f\": rpc error: code = NotFound desc = could not find container \"ef47091654862e7ed40d72295c813354132502e344efd1f309f27d5fd0132d0f\": container with ID starting with ef47091654862e7ed40d72295c813354132502e344efd1f309f27d5fd0132d0f not found: ID does not exist" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.158644 4721 scope.go:117] "RemoveContainer" containerID="31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.166792 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99"] Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.169813 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlh68\" (UniqueName: \"kubernetes.io/projected/bc87903c-e306-4c9c-a881-ed00eb454ae3-kube-api-access-rlh68\") pod \"bc87903c-e306-4c9c-a881-ed00eb454ae3\" (UID: \"bc87903c-e306-4c9c-a881-ed00eb454ae3\") " Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.172274 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-controller-manager-77d9df5fdb-g6f99"] Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.177762 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc87903c-e306-4c9c-a881-ed00eb454ae3-kube-api-access-rlh68" (OuterVolumeSpecName: "kube-api-access-rlh68") pod "bc87903c-e306-4c9c-a881-ed00eb454ae3" (UID: "bc87903c-e306-4c9c-a881-ed00eb454ae3"). InnerVolumeSpecName "kube-api-access-rlh68". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.181327 4721 scope.go:117] "RemoveContainer" containerID="f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.200200 4721 scope.go:117] "RemoveContainer" containerID="31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737" Dec 02 12:16:35 crc kubenswrapper[4721]: E1202 12:16:35.203372 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737\": container with ID starting with 31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737 not found: ID does not exist" containerID="31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.203453 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737"} err="failed to get container status \"31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737\": rpc error: code = NotFound desc = could not find container \"31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737\": container with ID starting with 31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737 not found: ID does not exist" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.203483 4721 scope.go:117] "RemoveContainer" containerID="f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e" Dec 02 12:16:35 crc kubenswrapper[4721]: E1202 12:16:35.204072 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e\": container with ID starting with f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e not found: ID does not exist" containerID="f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.204092 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e"} err="failed to get container status \"f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e\": rpc error: code = NotFound desc = could not find container \"f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e\": container with ID starting with f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e not found: ID does not exist" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.204126 4721 scope.go:117] "RemoveContainer" containerID="31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.204413 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737"} err="failed to get container status \"31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737\": rpc error: code = NotFound desc = could not find container \"31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737\": container with ID starting with 31b7e70f711b9faa91ebc381cae9bae3d03266a9cb60bf7e546419450ba1d737 not found: ID does not exist" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.204432 4721 
scope.go:117] "RemoveContainer" containerID="f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.204716 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e"} err="failed to get container status \"f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e\": rpc error: code = NotFound desc = could not find container \"f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e\": container with ID starting with f11050a9bb524ee1e05fd9a617a9605570aaea889638a62b9f51322be9a9b42e not found: ID does not exist" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.271874 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlh68\" (UniqueName: \"kubernetes.io/projected/bc87903c-e306-4c9c-a881-ed00eb454ae3-kube-api-access-rlh68\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.471409 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-hpzsg"] Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.477087 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-hpzsg"] Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.865799 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5"] Dec 02 12:16:35 crc kubenswrapper[4721]: I1202 12:16:35.866024 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" podUID="f1e8e66a-9b25-4d2a-94b7-afc09915748a" containerName="manager" containerID="cri-o://cf75139aee6cf879db1dac81cf7e3cddfc15c4af3d05ba15dfccb09a63a2bf8f" gracePeriod=10 Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.178577 4721 generic.go:334] "Generic (PLEG): container finished" podID="f1e8e66a-9b25-4d2a-94b7-afc09915748a" containerID="cf75139aee6cf879db1dac81cf7e3cddfc15c4af3d05ba15dfccb09a63a2bf8f" exitCode=0 Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.178665 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" event={"ID":"f1e8e66a-9b25-4d2a-94b7-afc09915748a","Type":"ContainerDied","Data":"cf75139aee6cf879db1dac81cf7e3cddfc15c4af3d05ba15dfccb09a63a2bf8f"} Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.184222 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-xcbcl"] Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.184442 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-xcbcl" podUID="17f5a185-b59b-4fa4-9ae1-8f664d3d7a58" containerName="registry-server" containerID="cri-o://3f1af360ba915913c138671254da62c399e4397481c2a654afefa8511bc6d56a" gracePeriod=30 Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.264387 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng"] Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.267150 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/27e8bc079695f3aed52a6c5be68196d91a6230a1a03a8fc87a19aa534fzksng"] Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.425121 4721 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.531613 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f1e8e66a-9b25-4d2a-94b7-afc09915748a-apiservice-cert\") pod \"f1e8e66a-9b25-4d2a-94b7-afc09915748a\" (UID: \"f1e8e66a-9b25-4d2a-94b7-afc09915748a\") " Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.531659 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgtgq\" (UniqueName: \"kubernetes.io/projected/f1e8e66a-9b25-4d2a-94b7-afc09915748a-kube-api-access-mgtgq\") pod \"f1e8e66a-9b25-4d2a-94b7-afc09915748a\" (UID: \"f1e8e66a-9b25-4d2a-94b7-afc09915748a\") " Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.531803 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f1e8e66a-9b25-4d2a-94b7-afc09915748a-webhook-cert\") pod \"f1e8e66a-9b25-4d2a-94b7-afc09915748a\" (UID: \"f1e8e66a-9b25-4d2a-94b7-afc09915748a\") " Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.536549 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1e8e66a-9b25-4d2a-94b7-afc09915748a-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "f1e8e66a-9b25-4d2a-94b7-afc09915748a" (UID: "f1e8e66a-9b25-4d2a-94b7-afc09915748a"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.537039 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1e8e66a-9b25-4d2a-94b7-afc09915748a-kube-api-access-mgtgq" (OuterVolumeSpecName: "kube-api-access-mgtgq") pod "f1e8e66a-9b25-4d2a-94b7-afc09915748a" (UID: "f1e8e66a-9b25-4d2a-94b7-afc09915748a"). InnerVolumeSpecName "kube-api-access-mgtgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.548493 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1e8e66a-9b25-4d2a-94b7-afc09915748a-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "f1e8e66a-9b25-4d2a-94b7-afc09915748a" (UID: "f1e8e66a-9b25-4d2a-94b7-afc09915748a"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.633497 4721 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f1e8e66a-9b25-4d2a-94b7-afc09915748a-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.633766 4721 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f1e8e66a-9b25-4d2a-94b7-afc09915748a-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.633832 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgtgq\" (UniqueName: \"kubernetes.io/projected/f1e8e66a-9b25-4d2a-94b7-afc09915748a-kube-api-access-mgtgq\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.860092 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6" path="/var/lib/kubelet/pods/1cb3b91a-ecd1-4bbe-9b36-87fc66dd81c6/volumes" Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.860887 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6259f8fd-9c16-4c09-8e11-213914fe35f8" path="/var/lib/kubelet/pods/6259f8fd-9c16-4c09-8e11-213914fe35f8/volumes" Dec 02 12:16:36 crc kubenswrapper[4721]: I1202 12:16:36.861408 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc87903c-e306-4c9c-a881-ed00eb454ae3" path="/var/lib/kubelet/pods/bc87903c-e306-4c9c-a881-ed00eb454ae3/volumes" Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.097749 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-xcbcl" Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.211985 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" event={"ID":"f1e8e66a-9b25-4d2a-94b7-afc09915748a","Type":"ContainerDied","Data":"0769ebfc7115727c42f4d7fd2e07244ea4a4aacf322832ecface7390f3412adc"} Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.212016 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5" Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.212372 4721 scope.go:117] "RemoveContainer" containerID="cf75139aee6cf879db1dac81cf7e3cddfc15c4af3d05ba15dfccb09a63a2bf8f" Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.214479 4721 generic.go:334] "Generic (PLEG): container finished" podID="17f5a185-b59b-4fa4-9ae1-8f664d3d7a58" containerID="3f1af360ba915913c138671254da62c399e4397481c2a654afefa8511bc6d56a" exitCode=0 Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.214577 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-xcbcl" Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.214557 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-xcbcl" event={"ID":"17f5a185-b59b-4fa4-9ae1-8f664d3d7a58","Type":"ContainerDied","Data":"3f1af360ba915913c138671254da62c399e4397481c2a654afefa8511bc6d56a"} Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.215464 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-xcbcl" event={"ID":"17f5a185-b59b-4fa4-9ae1-8f664d3d7a58","Type":"ContainerDied","Data":"418dfd6a1b3816b8e11735fea0668f1bf57bce4cb3176d7d18dc283cf66cd07e"} Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.229847 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5"] Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.233366 4721 scope.go:117] "RemoveContainer" containerID="3f1af360ba915913c138671254da62c399e4397481c2a654afefa8511bc6d56a" Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.234831 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-9f59d8d87-lgws5"] Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.245538 4721 scope.go:117] "RemoveContainer" containerID="3f1af360ba915913c138671254da62c399e4397481c2a654afefa8511bc6d56a" Dec 02 12:16:37 crc kubenswrapper[4721]: E1202 12:16:37.245997 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f1af360ba915913c138671254da62c399e4397481c2a654afefa8511bc6d56a\": container with ID starting with 3f1af360ba915913c138671254da62c399e4397481c2a654afefa8511bc6d56a not found: ID does not exist" containerID="3f1af360ba915913c138671254da62c399e4397481c2a654afefa8511bc6d56a" Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.246044 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f1af360ba915913c138671254da62c399e4397481c2a654afefa8511bc6d56a"} err="failed to get container status \"3f1af360ba915913c138671254da62c399e4397481c2a654afefa8511bc6d56a\": rpc error: code = NotFound desc = could not find container \"3f1af360ba915913c138671254da62c399e4397481c2a654afefa8511bc6d56a\": container with ID starting with 3f1af360ba915913c138671254da62c399e4397481c2a654afefa8511bc6d56a not found: ID does not exist" Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.251212 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-md7g6\" (UniqueName: \"kubernetes.io/projected/17f5a185-b59b-4fa4-9ae1-8f664d3d7a58-kube-api-access-md7g6\") pod \"17f5a185-b59b-4fa4-9ae1-8f664d3d7a58\" (UID: \"17f5a185-b59b-4fa4-9ae1-8f664d3d7a58\") " Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.257105 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17f5a185-b59b-4fa4-9ae1-8f664d3d7a58-kube-api-access-md7g6" (OuterVolumeSpecName: "kube-api-access-md7g6") pod "17f5a185-b59b-4fa4-9ae1-8f664d3d7a58" (UID: "17f5a185-b59b-4fa4-9ae1-8f664d3d7a58"). InnerVolumeSpecName "kube-api-access-md7g6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.353148 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-md7g6\" (UniqueName: \"kubernetes.io/projected/17f5a185-b59b-4fa4-9ae1-8f664d3d7a58-kube-api-access-md7g6\") on node \"crc\" DevicePath \"\"" Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.541555 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-xcbcl"] Dec 02 12:16:37 crc kubenswrapper[4721]: I1202 12:16:37.545140 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-xcbcl"] Dec 02 12:16:38 crc kubenswrapper[4721]: I1202 12:16:38.848290 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17f5a185-b59b-4fa4-9ae1-8f664d3d7a58" path="/var/lib/kubelet/pods/17f5a185-b59b-4fa4-9ae1-8f664d3d7a58/volumes" Dec 02 12:16:38 crc kubenswrapper[4721]: I1202 12:16:38.849443 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1e8e66a-9b25-4d2a-94b7-afc09915748a" path="/var/lib/kubelet/pods/f1e8e66a-9b25-4d2a-94b7-afc09915748a/volumes" Dec 02 12:16:49 crc kubenswrapper[4721]: E1202 12:16:49.116264 4721 configmap.go:193] Couldn't get configMap swift-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Dec 02 12:16:49 crc kubenswrapper[4721]: E1202 12:16:49.116759 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts podName:7349d6cc-58af-4d67-a865-31d8409c29cd nodeName:}" failed. No retries permitted until 2025-12-02 12:17:21.116745267 +0000 UTC m=+1570.855295616 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts") pod "keystone1088-account-delete-vlzc9" (UID: "7349d6cc-58af-4d67-a865-31d8409c29cd") : configmap "openstack-scripts" not found Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.393164 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-h5jq9/must-gather-r2trd"] Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.393759 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94c0083a-0c9e-43fc-9ad3-46517e9b4adb" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.393775 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="94c0083a-0c9e-43fc-9ad3-46517e9b4adb" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.393790 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04a81817-b7ad-473c-8a11-a79c9e61bcb7" containerName="barbican-api-log" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.393798 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="04a81817-b7ad-473c-8a11-a79c9e61bcb7" containerName="barbican-api-log" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.393808 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46db9721-f25f-4725-b8ea-8eda2b7ce1b3" containerName="mysql-bootstrap" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.393816 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="46db9721-f25f-4725-b8ea-8eda2b7ce1b3" containerName="mysql-bootstrap" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.393826 4721 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f1e8e66a-9b25-4d2a-94b7-afc09915748a" containerName="manager" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.393834 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1e8e66a-9b25-4d2a-94b7-afc09915748a" containerName="manager" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.393849 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b79c462f-8fbc-41aa-9f98-c00c17ff9705" containerName="manager" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.393857 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b79c462f-8fbc-41aa-9f98-c00c17ff9705" containerName="manager" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.393868 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93" containerName="memcached" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.393876 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93" containerName="memcached" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.393887 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7381a93e-ca9b-4d2b-ae20-c110ebab577a" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.393895 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7381a93e-ca9b-4d2b-ae20-c110ebab577a" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.393905 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="446fee83-175f-44a1-b4a5-24f8150e2d43" containerName="operator" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.393913 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="446fee83-175f-44a1-b4a5-24f8150e2d43" containerName="operator" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.393922 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46db9721-f25f-4725-b8ea-8eda2b7ce1b3" containerName="galera" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.393930 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="46db9721-f25f-4725-b8ea-8eda2b7ce1b3" containerName="galera" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.393940 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17f5a185-b59b-4fa4-9ae1-8f664d3d7a58" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.393947 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="17f5a185-b59b-4fa4-9ae1-8f664d3d7a58" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.393961 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89b339ea-d21f-4d65-bb93-926e793838ef" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.393969 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="89b339ea-d21f-4d65-bb93-926e793838ef" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.393979 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="605042d5-6e07-4037-8a8c-0ca3da0d2ac3" containerName="keystone-api" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.393990 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="605042d5-6e07-4037-8a8c-0ca3da0d2ac3" containerName="keystone-api" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394004 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89920af2-3af7-4428-80fa-ddf2ba97dfb9" 
containerName="setup-container" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394011 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="89920af2-3af7-4428-80fa-ddf2ba97dfb9" containerName="setup-container" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394021 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89920af2-3af7-4428-80fa-ddf2ba97dfb9" containerName="rabbitmq" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394029 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="89920af2-3af7-4428-80fa-ddf2ba97dfb9" containerName="rabbitmq" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394047 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69d9011a-8441-4afa-904b-706f5a0ffe6c" containerName="mysql-bootstrap" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394056 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="69d9011a-8441-4afa-904b-706f5a0ffe6c" containerName="mysql-bootstrap" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394070 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55ef2351-e56c-4a46-8350-1ed9f1dae8c7" containerName="mariadb-account-delete" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394078 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="55ef2351-e56c-4a46-8350-1ed9f1dae8c7" containerName="mariadb-account-delete" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394090 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6259f8fd-9c16-4c09-8e11-213914fe35f8" containerName="manager" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394099 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6259f8fd-9c16-4c09-8e11-213914fe35f8" containerName="manager" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394108 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc05df77-4dd2-4d22-87e8-7a0e9ff5181c" containerName="manager" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394115 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc05df77-4dd2-4d22-87e8-7a0e9ff5181c" containerName="manager" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394127 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6259f8fd-9c16-4c09-8e11-213914fe35f8" containerName="kube-rbac-proxy" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394134 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6259f8fd-9c16-4c09-8e11-213914fe35f8" containerName="kube-rbac-proxy" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394143 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69d9011a-8441-4afa-904b-706f5a0ffe6c" containerName="galera" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394150 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="69d9011a-8441-4afa-904b-706f5a0ffe6c" containerName="galera" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394158 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91d5a64b-5c90-4fba-9e6c-416071e412b3" containerName="galera" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394165 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="91d5a64b-5c90-4fba-9e6c-416071e412b3" containerName="galera" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394176 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9214199-fedd-4dd1-8785-aab679b0178b" containerName="manager" Dec 02 12:16:50 crc 
kubenswrapper[4721]: I1202 12:16:50.394183 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9214199-fedd-4dd1-8785-aab679b0178b" containerName="manager" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394193 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca6ac23f-63bc-4601-abfd-d5a61402f51f" containerName="barbican-keystone-listener" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394202 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca6ac23f-63bc-4601-abfd-d5a61402f51f" containerName="barbican-keystone-listener" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394215 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7499522-49b1-4362-a257-50c2d2bf3bdb" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394222 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7499522-49b1-4362-a257-50c2d2bf3bdb" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394231 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91d5a64b-5c90-4fba-9e6c-416071e412b3" containerName="mysql-bootstrap" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394238 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="91d5a64b-5c90-4fba-9e6c-416071e412b3" containerName="mysql-bootstrap" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394247 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca6ac23f-63bc-4601-abfd-d5a61402f51f" containerName="barbican-keystone-listener-log" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394255 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca6ac23f-63bc-4601-abfd-d5a61402f51f" containerName="barbican-keystone-listener-log" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394267 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04a81817-b7ad-473c-8a11-a79c9e61bcb7" containerName="barbican-api" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394274 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="04a81817-b7ad-473c-8a11-a79c9e61bcb7" containerName="barbican-api" Dec 02 12:16:50 crc kubenswrapper[4721]: E1202 12:16:50.394286 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc87903c-e306-4c9c-a881-ed00eb454ae3" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394294 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc87903c-e306-4c9c-a881-ed00eb454ae3" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394444 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="94c0083a-0c9e-43fc-9ad3-46517e9b4adb" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394461 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1e8e66a-9b25-4d2a-94b7-afc09915748a" containerName="manager" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394473 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="605042d5-6e07-4037-8a8c-0ca3da0d2ac3" containerName="keystone-api" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394483 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="17f5a185-b59b-4fa4-9ae1-8f664d3d7a58" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394493 4721 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="ca6ac23f-63bc-4601-abfd-d5a61402f51f" containerName="barbican-keystone-listener-log" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394506 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="91d5a64b-5c90-4fba-9e6c-416071e412b3" containerName="galera" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394514 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e5e1a63-a9a0-4ca1-bc31-f5863bdf5e93" containerName="memcached" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394525 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="46db9721-f25f-4725-b8ea-8eda2b7ce1b3" containerName="galera" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394531 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc05df77-4dd2-4d22-87e8-7a0e9ff5181c" containerName="manager" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394543 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="04a81817-b7ad-473c-8a11-a79c9e61bcb7" containerName="barbican-api-log" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394555 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="04a81817-b7ad-473c-8a11-a79c9e61bcb7" containerName="barbican-api" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394563 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="89b339ea-d21f-4d65-bb93-926e793838ef" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394576 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6259f8fd-9c16-4c09-8e11-213914fe35f8" containerName="manager" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394587 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7381a93e-ca9b-4d2b-ae20-c110ebab577a" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394603 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="89920af2-3af7-4428-80fa-ddf2ba97dfb9" containerName="rabbitmq" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394611 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="69d9011a-8441-4afa-904b-706f5a0ffe6c" containerName="galera" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394619 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9214199-fedd-4dd1-8785-aab679b0178b" containerName="manager" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394628 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b79c462f-8fbc-41aa-9f98-c00c17ff9705" containerName="manager" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394638 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="55ef2351-e56c-4a46-8350-1ed9f1dae8c7" containerName="mariadb-account-delete" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394646 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca6ac23f-63bc-4601-abfd-d5a61402f51f" containerName="barbican-keystone-listener" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394654 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7499522-49b1-4362-a257-50c2d2bf3bdb" containerName="registry-server" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394663 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="446fee83-175f-44a1-b4a5-24f8150e2d43" containerName="operator" Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394671 4721 
Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394671 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6259f8fd-9c16-4c09-8e11-213914fe35f8" containerName="kube-rbac-proxy"
Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.394683 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc87903c-e306-4c9c-a881-ed00eb454ae3" containerName="registry-server"
Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.395443 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-h5jq9/must-gather-r2trd"
Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.398022 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-h5jq9"/"kube-root-ca.crt"
Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.411608 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-h5jq9/must-gather-r2trd"]
Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.412450 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-h5jq9"/"openshift-service-ca.crt"
Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.532920 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fgsf\" (UniqueName: \"kubernetes.io/projected/e48d8132-fb2f-4162-9297-95a972e985e0-kube-api-access-5fgsf\") pod \"must-gather-r2trd\" (UID: \"e48d8132-fb2f-4162-9297-95a972e985e0\") " pod="openshift-must-gather-h5jq9/must-gather-r2trd"
Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.533199 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e48d8132-fb2f-4162-9297-95a972e985e0-must-gather-output\") pod \"must-gather-r2trd\" (UID: \"e48d8132-fb2f-4162-9297-95a972e985e0\") " pod="openshift-must-gather-h5jq9/must-gather-r2trd"
Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.634552 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fgsf\" (UniqueName: \"kubernetes.io/projected/e48d8132-fb2f-4162-9297-95a972e985e0-kube-api-access-5fgsf\") pod \"must-gather-r2trd\" (UID: \"e48d8132-fb2f-4162-9297-95a972e985e0\") " pod="openshift-must-gather-h5jq9/must-gather-r2trd"
Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.634610 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e48d8132-fb2f-4162-9297-95a972e985e0-must-gather-output\") pod \"must-gather-r2trd\" (UID: \"e48d8132-fb2f-4162-9297-95a972e985e0\") " pod="openshift-must-gather-h5jq9/must-gather-r2trd"
Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.635322 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e48d8132-fb2f-4162-9297-95a972e985e0-must-gather-output\") pod \"must-gather-r2trd\" (UID: \"e48d8132-fb2f-4162-9297-95a972e985e0\") " pod="openshift-must-gather-h5jq9/must-gather-r2trd"
Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.652520 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fgsf\" (UniqueName: \"kubernetes.io/projected/e48d8132-fb2f-4162-9297-95a972e985e0-kube-api-access-5fgsf\") pod \"must-gather-r2trd\" (UID: \"e48d8132-fb2f-4162-9297-95a972e985e0\") " pod="openshift-must-gather-h5jq9/must-gather-r2trd"
Dec 02 12:16:50 crc kubenswrapper[4721]: I1202 12:16:50.712001 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-h5jq9/must-gather-r2trd"
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.091551 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9"
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.098500 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-h5jq9/must-gather-r2trd"]
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.242042 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts\") pod \"7349d6cc-58af-4d67-a865-31d8409c29cd\" (UID: \"7349d6cc-58af-4d67-a865-31d8409c29cd\") "
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.242167 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9h7s5\" (UniqueName: \"kubernetes.io/projected/7349d6cc-58af-4d67-a865-31d8409c29cd-kube-api-access-9h7s5\") pod \"7349d6cc-58af-4d67-a865-31d8409c29cd\" (UID: \"7349d6cc-58af-4d67-a865-31d8409c29cd\") "
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.243170 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7349d6cc-58af-4d67-a865-31d8409c29cd" (UID: "7349d6cc-58af-4d67-a865-31d8409c29cd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.248626 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7349d6cc-58af-4d67-a865-31d8409c29cd-kube-api-access-9h7s5" (OuterVolumeSpecName: "kube-api-access-9h7s5") pod "7349d6cc-58af-4d67-a865-31d8409c29cd" (UID: "7349d6cc-58af-4d67-a865-31d8409c29cd"). InnerVolumeSpecName "kube-api-access-9h7s5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.317572 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-h5jq9/must-gather-r2trd" event={"ID":"e48d8132-fb2f-4162-9297-95a972e985e0","Type":"ContainerStarted","Data":"d2d2777e1019483dff58a52bae1a54f81a6a4af120610934a040c9a27d72f5b4"}
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.318754 4721 generic.go:334] "Generic (PLEG): container finished" podID="7349d6cc-58af-4d67-a865-31d8409c29cd" containerID="cbdd31746d6f20e86a32ba99fee61c3e9dadc5f76ccd0ff940a41d5055c5aa11" exitCode=137
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.318783 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9" event={"ID":"7349d6cc-58af-4d67-a865-31d8409c29cd","Type":"ContainerDied","Data":"cbdd31746d6f20e86a32ba99fee61c3e9dadc5f76ccd0ff940a41d5055c5aa11"}
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.318842 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9" event={"ID":"7349d6cc-58af-4d67-a865-31d8409c29cd","Type":"ContainerDied","Data":"9e23c88cd28bae65f7c7fcc8d6e481288c76aa71539a007e92ce51639af9d790"}
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.318864 4721 scope.go:117] "RemoveContainer" containerID="cbdd31746d6f20e86a32ba99fee61c3e9dadc5f76ccd0ff940a41d5055c5aa11"
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.318818 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="swift-kuttl-tests/keystone1088-account-delete-vlzc9"
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.337685 4721 scope.go:117] "RemoveContainer" containerID="cbdd31746d6f20e86a32ba99fee61c3e9dadc5f76ccd0ff940a41d5055c5aa11"
Dec 02 12:16:51 crc kubenswrapper[4721]: E1202 12:16:51.339023 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbdd31746d6f20e86a32ba99fee61c3e9dadc5f76ccd0ff940a41d5055c5aa11\": container with ID starting with cbdd31746d6f20e86a32ba99fee61c3e9dadc5f76ccd0ff940a41d5055c5aa11 not found: ID does not exist" containerID="cbdd31746d6f20e86a32ba99fee61c3e9dadc5f76ccd0ff940a41d5055c5aa11"
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.340105 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbdd31746d6f20e86a32ba99fee61c3e9dadc5f76ccd0ff940a41d5055c5aa11"} err="failed to get container status \"cbdd31746d6f20e86a32ba99fee61c3e9dadc5f76ccd0ff940a41d5055c5aa11\": rpc error: code = NotFound desc = could not find container \"cbdd31746d6f20e86a32ba99fee61c3e9dadc5f76ccd0ff940a41d5055c5aa11\": container with ID starting with cbdd31746d6f20e86a32ba99fee61c3e9dadc5f76ccd0ff940a41d5055c5aa11 not found: ID does not exist"
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.350483 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7349d6cc-58af-4d67-a865-31d8409c29cd-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.350603 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9h7s5\" (UniqueName: \"kubernetes.io/projected/7349d6cc-58af-4d67-a865-31d8409c29cd-kube-api-access-9h7s5\") on node \"crc\" DevicePath \"\""
DELETE" source="api" pods=["swift-kuttl-tests/keystone1088-account-delete-vlzc9"] Dec 02 12:16:51 crc kubenswrapper[4721]: I1202 12:16:51.357399 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["swift-kuttl-tests/keystone1088-account-delete-vlzc9"] Dec 02 12:16:52 crc kubenswrapper[4721]: I1202 12:16:52.853739 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7349d6cc-58af-4d67-a865-31d8409c29cd" path="/var/lib/kubelet/pods/7349d6cc-58af-4d67-a865-31d8409c29cd/volumes" Dec 02 12:16:55 crc kubenswrapper[4721]: I1202 12:16:55.347878 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-h5jq9/must-gather-r2trd" event={"ID":"e48d8132-fb2f-4162-9297-95a972e985e0","Type":"ContainerStarted","Data":"d5e7e53f171e4ef3bbd081d80daf18bfa10240ef7cdedfb9a1fa8c69f9c37994"} Dec 02 12:16:55 crc kubenswrapper[4721]: I1202 12:16:55.348463 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-h5jq9/must-gather-r2trd" event={"ID":"e48d8132-fb2f-4162-9297-95a972e985e0","Type":"ContainerStarted","Data":"4476479b8adaae5bb05c5aacd3168c85dec2ffdd68d8f3e2af6574fbb24c8c24"} Dec 02 12:16:55 crc kubenswrapper[4721]: I1202 12:16:55.361310 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-h5jq9/must-gather-r2trd" podStartSLOduration=1.6972645069999999 podStartE2EDuration="5.36126083s" podCreationTimestamp="2025-12-02 12:16:50 +0000 UTC" firstStartedPulling="2025-12-02 12:16:51.126182071 +0000 UTC m=+1540.864732420" lastFinishedPulling="2025-12-02 12:16:54.790178374 +0000 UTC m=+1544.528728743" observedRunningTime="2025-12-02 12:16:55.361132706 +0000 UTC m=+1545.099683055" watchObservedRunningTime="2025-12-02 12:16:55.36126083 +0000 UTC m=+1545.099811199" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.465668 4721 scope.go:117] "RemoveContainer" containerID="79048ad547310d151b543ca2c4d92123297c5d7450460d18f72db7d8fafb3fac" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.492858 4721 scope.go:117] "RemoveContainer" containerID="b209ace06b46ae1736b7777c480e2ebbec7d2d80b33606460a3e42f6eb9e8b63" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.509476 4721 scope.go:117] "RemoveContainer" containerID="6a14acfad3a520fab2c978ed50126f1f07a27f4477cf6fce3e94912e0a93ef5b" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.523325 4721 scope.go:117] "RemoveContainer" containerID="646d2606fee5f76263970ee9d43a4a87465a4433a234be1bb22de42d414b0577" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.542623 4721 scope.go:117] "RemoveContainer" containerID="5e4bd538fec49d6ffcab7dc5632b80a9c071dac18fde28366f23eab915fc0a58" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.568153 4721 scope.go:117] "RemoveContainer" containerID="2eb528eb57613938286320dc883dc05bef02c820daaff9518effa88078a359e1" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.585630 4721 scope.go:117] "RemoveContainer" containerID="015794c7a59acb77419f4cb9303b7c8104a1cffc470528cbb0728af7ce93d1bb" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.624054 4721 scope.go:117] "RemoveContainer" containerID="eac8db7656343e113eab7fa319706f4aa7e3ffd7817df882e1564f9bd3a0f1cf" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.638417 4721 scope.go:117] "RemoveContainer" containerID="399113c68a08ec5da56d1d064dffcf8274965c542e9200e84cddcda9cbbf4424" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.651764 4721 scope.go:117] "RemoveContainer" 
containerID="a87b177f0540cc3e19c31eac0f39199242540d018e4084a050ad66d6f05b5476" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.667718 4721 scope.go:117] "RemoveContainer" containerID="0d43e9d61360f9b06a89842cd1d4e906cf50c0bebfbf635c357ad4c1ad7578cd" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.690963 4721 scope.go:117] "RemoveContainer" containerID="ce28b6e9f05405a0b6043c588b3af01293215c9b71a887847201c5e25e84c087" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.705359 4721 scope.go:117] "RemoveContainer" containerID="5f7af03addf6fbc441eb5c04f6ca014d9db4b4045a192146b05b7c1617cb20c6" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.719774 4721 scope.go:117] "RemoveContainer" containerID="7a094837d14f6e8e9e3f9401ad182265db1d1a03ca5e4fbfbadd9310b2a3515b" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.735643 4721 scope.go:117] "RemoveContainer" containerID="6ddf107d4c6a19d18c52a20eda9aee5c810eca972126e37d6f26ba4a18a55fbb" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.750606 4721 scope.go:117] "RemoveContainer" containerID="18b6e4a6f8e7df1af99a0dd9ecd2a89682cf778ff4a5070ecad697cafcc9de71" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.765374 4721 scope.go:117] "RemoveContainer" containerID="8c534a6b39d953cdb87279019d88bfb7307b6abe29af4a7dd5ea0994849fb449" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.782831 4721 scope.go:117] "RemoveContainer" containerID="a4098a524eb6077b7f2eba5a8cc8b53104ff0ba883f693651e5fcbe91c89ae15" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.797523 4721 scope.go:117] "RemoveContainer" containerID="d9453615c91a9028a3fa90cd00b70ad0567371011d6702dbb05879e5b2901d09" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.811741 4721 scope.go:117] "RemoveContainer" containerID="ff56691d3a9c1f98185ec467975a12a9eb5195656a43f650c14f5027db84c79c" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.826219 4721 scope.go:117] "RemoveContainer" containerID="7a3d988b6f2d9267bde321ead3e578f3e1b5cc60aa115a5283457bd419e54df0" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.841433 4721 scope.go:117] "RemoveContainer" containerID="3d2811372dcaa70a66210d9a4e9105876d21bcdb977c4e8b73cc0002a8ccde32" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.858614 4721 scope.go:117] "RemoveContainer" containerID="c98f9453ed9e75ea4836cdf8e6b5a3244e10b1a2aa391d76ff2f94bb0544c315" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.874367 4721 scope.go:117] "RemoveContainer" containerID="35c98f8a826109cd5f3068477fa1fcb608d809c15e6bf77199d49da95401ab26" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.900117 4721 scope.go:117] "RemoveContainer" containerID="989a2a67669099b9a0dcfb4dbef74860804b11705d940e4a4b9b264c9d38d30e" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.916757 4721 scope.go:117] "RemoveContainer" containerID="c97a160f62600a18331f406fb788af3f4416d91e53515e01cd8a2bd6f5f86ffc" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.933331 4721 scope.go:117] "RemoveContainer" containerID="fa529b8d14ca353f826dbcb7b3dcb20221b0e0b1d7224e9cf7e7ab88fc907250" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.946733 4721 scope.go:117] "RemoveContainer" containerID="3a09d2a686d153d1883de807db818b9a01ac2db58431e30879a72e21f4a5f38c" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.963842 4721 scope.go:117] "RemoveContainer" containerID="24ba6660ddc36471c262bd11a52bbe8a3bad428befacec5ddf02eedc95c3e6d0" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.982404 4721 
scope.go:117] "RemoveContainer" containerID="6b07c8c1ef6d3a5e2c6bd5e43e3d99bde1cda9436d1c37bf2ddab883430dc9d2" Dec 02 12:17:13 crc kubenswrapper[4721]: I1202 12:17:13.997468 4721 scope.go:117] "RemoveContainer" containerID="9e9caea58dfca76a063dd874352d9f9858dd92aded0f4e64c849be5341430382" Dec 02 12:17:14 crc kubenswrapper[4721]: I1202 12:17:14.012687 4721 scope.go:117] "RemoveContainer" containerID="7beea8932fafcaff62d322d433c687da18e8d2f63e82a7e4f62d21d07c22445a" Dec 02 12:17:14 crc kubenswrapper[4721]: I1202 12:17:14.026872 4721 scope.go:117] "RemoveContainer" containerID="8867cd60b44e32998edc44d0665008f6aa6ef8d6916aaf67946423d4b7610a05" Dec 02 12:17:14 crc kubenswrapper[4721]: I1202 12:17:14.042284 4721 scope.go:117] "RemoveContainer" containerID="a6830c7ebe1d4ca0528ad5a79fce46880086b7fe0020443bcffd89f877cc8c74" Dec 02 12:17:14 crc kubenswrapper[4721]: I1202 12:17:14.059114 4721 scope.go:117] "RemoveContainer" containerID="a650ef7ce606582ec00ad4221f0497a8fdce279b33223a7af7d20baafc76f5b0" Dec 02 12:17:14 crc kubenswrapper[4721]: I1202 12:17:14.074499 4721 scope.go:117] "RemoveContainer" containerID="6a414ffa8cfba24ff794f2c3eb9a2556a18ebd835959ebf730eba0362661e927" Dec 02 12:17:14 crc kubenswrapper[4721]: I1202 12:17:14.091298 4721 scope.go:117] "RemoveContainer" containerID="2325e9c54e3ce65b76d6660c16911dc3967707bea0610b71982b95a045f08a45" Dec 02 12:17:14 crc kubenswrapper[4721]: I1202 12:17:14.105253 4721 scope.go:117] "RemoveContainer" containerID="d9aac66e1bee7ed2128bc3654eaebc452e06589349c317d9c97a90fddf5013cb" Dec 02 12:17:14 crc kubenswrapper[4721]: I1202 12:17:14.124294 4721 scope.go:117] "RemoveContainer" containerID="e232483cb243215b722c6f0bfb3531c2d52e026ab0148df621f5a505cc098d6c" Dec 02 12:17:14 crc kubenswrapper[4721]: I1202 12:17:14.138692 4721 scope.go:117] "RemoveContainer" containerID="33b1828595b8c80890eae52dfcd95d97abd22b8b0c3f51c5b0f3914c5c94004e" Dec 02 12:17:14 crc kubenswrapper[4721]: I1202 12:17:14.157061 4721 scope.go:117] "RemoveContainer" containerID="3657190670b930bd797e45c74d816925ec80717d14b18e7006b8f6b6e3f7deb9" Dec 02 12:17:14 crc kubenswrapper[4721]: I1202 12:17:14.175169 4721 scope.go:117] "RemoveContainer" containerID="8b0f2704fb9f94f27d0ac560e8f3865dc7642a1105cb915ddbe379c0c7a1723b" Dec 02 12:17:35 crc kubenswrapper[4721]: I1202 12:17:35.838932 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-drfrd_18b11741-408b-4270-abd3-b43fd24149ab/control-plane-machine-set-operator/0.log" Dec 02 12:17:36 crc kubenswrapper[4721]: I1202 12:17:36.017197 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zjwgh_b464a77f-4bf7-4719-98c2-d07e5bde858a/machine-api-operator/0.log" Dec 02 12:17:36 crc kubenswrapper[4721]: I1202 12:17:36.029366 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zjwgh_b464a77f-4bf7-4719-98c2-d07e5bde858a/kube-rbac-proxy/0.log" Dec 02 12:17:49 crc kubenswrapper[4721]: I1202 12:17:49.741915 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-clfzp_15a0fceb-bf2e-4a05-870e-7501c772ae6c/kube-rbac-proxy/0.log" Dec 02 12:17:49 crc kubenswrapper[4721]: I1202 12:17:49.759783 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-clfzp_15a0fceb-bf2e-4a05-870e-7501c772ae6c/controller/0.log" Dec 02 12:17:49 crc kubenswrapper[4721]: I1202 12:17:49.943151 
Dec 02 12:17:49 crc kubenswrapper[4721]: I1202 12:17:49.943151 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-frr-files/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.039877 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-frr-files/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.062057 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-reloader/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.076275 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-metrics/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.111597 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-reloader/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.290036 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-metrics/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.305295 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-reloader/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.321623 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-frr-files/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.321813 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-metrics/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.486710 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-reloader/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.487356 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-frr-files/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.512772 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/controller/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.516327 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-metrics/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.640546 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/frr-metrics/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.693022 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/kube-rbac-proxy/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.714726 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/kube-rbac-proxy-frr/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.821171 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/reloader/0.log"
Dec 02 12:17:50 crc kubenswrapper[4721]: I1202 12:17:50.937257 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-qmwqj_33b0035f-5cb5-458f-a5f9-498a488c358f/frr-k8s-webhook-server/0.log"
Dec 02 12:17:51 crc kubenswrapper[4721]: I1202 12:17:51.058300 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-86c7ccf9f9-b95bs_c7571582-1d03-4d8d-92f0-0ec120163508/manager/0.log"
Dec 02 12:17:51 crc kubenswrapper[4721]: I1202 12:17:51.140980 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/frr/0.log"
Dec 02 12:17:51 crc kubenswrapper[4721]: I1202 12:17:51.159597 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5c48b86c94-n454v_1e7d973f-0100-4daa-b4ca-f6f8f9b90a22/webhook-server/0.log"
Dec 02 12:17:51 crc kubenswrapper[4721]: I1202 12:17:51.260029 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-m4dmx_b91afd1f-b9c6-49e2-8c26-93bdf658600e/kube-rbac-proxy/0.log"
Dec 02 12:17:51 crc kubenswrapper[4721]: I1202 12:17:51.392044 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-m4dmx_b91afd1f-b9c6-49e2-8c26-93bdf658600e/speaker/0.log"
Dec 02 12:17:55 crc kubenswrapper[4721]: I1202 12:17:55.703319 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 12:17:55 crc kubenswrapper[4721]: I1202 12:17:55.703849 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 12:18:12 crc kubenswrapper[4721]: I1202 12:18:12.352418 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl_dc649529-91a2-4d31-9c9c-1ad1523d7182/util/0.log"
Dec 02 12:18:12 crc kubenswrapper[4721]: I1202 12:18:12.480504 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl_dc649529-91a2-4d31-9c9c-1ad1523d7182/util/0.log"
Dec 02 12:18:12 crc kubenswrapper[4721]: I1202 12:18:12.514884 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl_dc649529-91a2-4d31-9c9c-1ad1523d7182/pull/0.log"
Dec 02 12:18:12 crc kubenswrapper[4721]: I1202 12:18:12.532531 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl_dc649529-91a2-4d31-9c9c-1ad1523d7182/pull/0.log"
Dec 02 12:18:12 crc kubenswrapper[4721]: I1202 12:18:12.697772 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl_dc649529-91a2-4d31-9c9c-1ad1523d7182/extract/0.log"
Dec 02 12:18:12 crc kubenswrapper[4721]: I1202 12:18:12.708445 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl_dc649529-91a2-4d31-9c9c-1ad1523d7182/pull/0.log"
Dec 02 12:18:12 crc kubenswrapper[4721]: I1202 12:18:12.722269 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl_dc649529-91a2-4d31-9c9c-1ad1523d7182/util/0.log"
Dec 02 12:18:12 crc kubenswrapper[4721]: I1202 12:18:12.834591 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wwzsl_0cd807a4-666e-4d8e-937d-bddbe10b5587/extract-utilities/0.log"
Dec 02 12:18:12 crc kubenswrapper[4721]: I1202 12:18:12.995870 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wwzsl_0cd807a4-666e-4d8e-937d-bddbe10b5587/extract-utilities/0.log"
Dec 02 12:18:13 crc kubenswrapper[4721]: I1202 12:18:13.009556 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wwzsl_0cd807a4-666e-4d8e-937d-bddbe10b5587/extract-content/0.log"
Dec 02 12:18:13 crc kubenswrapper[4721]: I1202 12:18:13.040200 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wwzsl_0cd807a4-666e-4d8e-937d-bddbe10b5587/extract-content/0.log"
Dec 02 12:18:13 crc kubenswrapper[4721]: I1202 12:18:13.178271 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wwzsl_0cd807a4-666e-4d8e-937d-bddbe10b5587/extract-utilities/0.log"
Dec 02 12:18:13 crc kubenswrapper[4721]: I1202 12:18:13.202204 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wwzsl_0cd807a4-666e-4d8e-937d-bddbe10b5587/extract-content/0.log"
Dec 02 12:18:13 crc kubenswrapper[4721]: I1202 12:18:13.378071 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k95pl_f0efa73b-b118-4069-a168-bb26db513ef7/extract-utilities/0.log"
Dec 02 12:18:13 crc kubenswrapper[4721]: I1202 12:18:13.524530 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k95pl_f0efa73b-b118-4069-a168-bb26db513ef7/extract-content/0.log"
Dec 02 12:18:13 crc kubenswrapper[4721]: I1202 12:18:13.533256 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wwzsl_0cd807a4-666e-4d8e-937d-bddbe10b5587/registry-server/0.log"
Dec 02 12:18:13 crc kubenswrapper[4721]: I1202 12:18:13.556705 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k95pl_f0efa73b-b118-4069-a168-bb26db513ef7/extract-content/0.log"
Dec 02 12:18:13 crc kubenswrapper[4721]: I1202 12:18:13.559685 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k95pl_f0efa73b-b118-4069-a168-bb26db513ef7/extract-utilities/0.log"
Dec 02 12:18:13 crc kubenswrapper[4721]: I1202 12:18:13.738728 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k95pl_f0efa73b-b118-4069-a168-bb26db513ef7/extract-utilities/0.log"
Dec 02 12:18:13 crc kubenswrapper[4721]: I1202 12:18:13.778605 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k95pl_f0efa73b-b118-4069-a168-bb26db513ef7/extract-content/0.log"
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-w6v9r_9b2d6ebc-35e8-4617-ab74-d40b1a19d84e/marketplace-operator/0.log" Dec 02 12:18:13 crc kubenswrapper[4721]: I1202 12:18:13.990364 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-692sp_94263503-850e-479a-9335-8036ea37b30f/extract-utilities/0.log" Dec 02 12:18:14 crc kubenswrapper[4721]: I1202 12:18:14.073259 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k95pl_f0efa73b-b118-4069-a168-bb26db513ef7/registry-server/0.log" Dec 02 12:18:14 crc kubenswrapper[4721]: I1202 12:18:14.145394 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-692sp_94263503-850e-479a-9335-8036ea37b30f/extract-utilities/0.log" Dec 02 12:18:14 crc kubenswrapper[4721]: I1202 12:18:14.196637 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-692sp_94263503-850e-479a-9335-8036ea37b30f/extract-content/0.log" Dec 02 12:18:14 crc kubenswrapper[4721]: I1202 12:18:14.197332 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-692sp_94263503-850e-479a-9335-8036ea37b30f/extract-content/0.log" Dec 02 12:18:14 crc kubenswrapper[4721]: I1202 12:18:14.339929 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-692sp_94263503-850e-479a-9335-8036ea37b30f/extract-utilities/0.log" Dec 02 12:18:14 crc kubenswrapper[4721]: I1202 12:18:14.354199 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-692sp_94263503-850e-479a-9335-8036ea37b30f/extract-content/0.log" Dec 02 12:18:14 crc kubenswrapper[4721]: I1202 12:18:14.447266 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-692sp_94263503-850e-479a-9335-8036ea37b30f/registry-server/0.log" Dec 02 12:18:14 crc kubenswrapper[4721]: I1202 12:18:14.496756 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fd78g_e586d5d2-126d-4886-af16-793d937c0eaf/extract-utilities/0.log" Dec 02 12:18:14 crc kubenswrapper[4721]: I1202 12:18:14.596896 4721 scope.go:117] "RemoveContainer" containerID="01b24f21030f43edfac8cb49a1c0d04e6cbccb0a6f2dbd44d56ea1f64f4cffed" Dec 02 12:18:14 crc kubenswrapper[4721]: I1202 12:18:14.634007 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fd78g_e586d5d2-126d-4886-af16-793d937c0eaf/extract-utilities/0.log" Dec 02 12:18:14 crc kubenswrapper[4721]: I1202 12:18:14.658483 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fd78g_e586d5d2-126d-4886-af16-793d937c0eaf/extract-content/0.log" Dec 02 12:18:14 crc kubenswrapper[4721]: I1202 12:18:14.666419 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fd78g_e586d5d2-126d-4886-af16-793d937c0eaf/extract-content/0.log" Dec 02 12:18:14 crc kubenswrapper[4721]: I1202 12:18:14.824850 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fd78g_e586d5d2-126d-4886-af16-793d937c0eaf/extract-utilities/0.log" Dec 02 12:18:14 crc kubenswrapper[4721]: I1202 12:18:14.840277 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-fd78g_e586d5d2-126d-4886-af16-793d937c0eaf/extract-content/0.log" Dec 02 12:18:15 crc kubenswrapper[4721]: I1202 12:18:15.168113 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fd78g_e586d5d2-126d-4886-af16-793d937c0eaf/registry-server/0.log" Dec 02 12:18:25 crc kubenswrapper[4721]: I1202 12:18:25.703090 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 12:18:25 crc kubenswrapper[4721]: I1202 12:18:25.703726 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 12:18:55 crc kubenswrapper[4721]: I1202 12:18:55.703463 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 12:18:55 crc kubenswrapper[4721]: I1202 12:18:55.704114 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 12:18:55 crc kubenswrapper[4721]: I1202 12:18:55.704166 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 12:18:55 crc kubenswrapper[4721]: I1202 12:18:55.704872 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0"} pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 12:18:55 crc kubenswrapper[4721]: I1202 12:18:55.704948 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" containerID="cri-o://d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" gracePeriod=600 Dec 02 12:18:55 crc kubenswrapper[4721]: E1202 12:18:55.829438 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:18:56 crc kubenswrapper[4721]: I1202 12:18:56.086878 4721 generic.go:334] "Generic (PLEG): container finished" podID="141842b6-fd0c-48e6-9795-a627e6f96bd4" 
containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" exitCode=0 Dec 02 12:18:56 crc kubenswrapper[4721]: I1202 12:18:56.086958 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerDied","Data":"d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0"} Dec 02 12:18:56 crc kubenswrapper[4721]: I1202 12:18:56.087412 4721 scope.go:117] "RemoveContainer" containerID="e66eb73009f2b3bd9c935e4952f8884d1696ff381f4f38bc986a2a57bb5b7ce6" Dec 02 12:18:56 crc kubenswrapper[4721]: I1202 12:18:56.089023 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:18:56 crc kubenswrapper[4721]: E1202 12:18:56.089889 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:19:07 crc kubenswrapper[4721]: I1202 12:19:07.840752 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:19:07 crc kubenswrapper[4721]: E1202 12:19:07.841591 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:19:14 crc kubenswrapper[4721]: I1202 12:19:14.651142 4721 scope.go:117] "RemoveContainer" containerID="e3595ce08cc19a10f06d9763a02bfd4c692e4315e633abef85a1ba70e959fcda" Dec 02 12:19:14 crc kubenswrapper[4721]: I1202 12:19:14.686422 4721 scope.go:117] "RemoveContainer" containerID="ff082e662a26aeeea6732d9ad265b9acb9da661a4252a9d3dd52302629db893e" Dec 02 12:19:14 crc kubenswrapper[4721]: I1202 12:19:14.701589 4721 scope.go:117] "RemoveContainer" containerID="bdbbf47a97448d8e3f16f5aeb238ed4d4f54e04bba167e8432c8d3dffe4602ca" Dec 02 12:19:14 crc kubenswrapper[4721]: I1202 12:19:14.721518 4721 scope.go:117] "RemoveContainer" containerID="489335ac195dc047e4f2b61ca4ae7b3d1810370d114a16f7033f42068f35364d" Dec 02 12:19:14 crc kubenswrapper[4721]: I1202 12:19:14.741522 4721 scope.go:117] "RemoveContainer" containerID="6e9c97886d2281d2db8385c13740c5db13108dbad3318be268566bdf4ef38e60" Dec 02 12:19:14 crc kubenswrapper[4721]: I1202 12:19:14.766183 4721 scope.go:117] "RemoveContainer" containerID="3c4eacf0568ae73dd6413ce1561622aa8998c530cf8df7cce6ebe9c2c46d80b0" Dec 02 12:19:14 crc kubenswrapper[4721]: I1202 12:19:14.784120 4721 scope.go:117] "RemoveContainer" containerID="ddcdd1ec5d88f5c19c5c33956dd876023076ce8196b01b8b60b11011d978cc74" Dec 02 12:19:14 crc kubenswrapper[4721]: I1202 12:19:14.804426 4721 scope.go:117] "RemoveContainer" containerID="3012afa35630159ed0600ebceef3df1eaf4dfb0728ea48837272b66f5dce0c7b" Dec 02 12:19:14 crc kubenswrapper[4721]: I1202 12:19:14.826803 4721 scope.go:117] "RemoveContainer" 
containerID="056786e0cc65aa5944f5f7bf4545d4c3cd9ae097250472dddf1c826cb1ac9231" Dec 02 12:19:14 crc kubenswrapper[4721]: I1202 12:19:14.854283 4721 scope.go:117] "RemoveContainer" containerID="8363b9e2ac15e6050113b7320c011910acd54ded41353912314d366cc137db51" Dec 02 12:19:17 crc kubenswrapper[4721]: I1202 12:19:17.222258 4721 generic.go:334] "Generic (PLEG): container finished" podID="e48d8132-fb2f-4162-9297-95a972e985e0" containerID="4476479b8adaae5bb05c5aacd3168c85dec2ffdd68d8f3e2af6574fbb24c8c24" exitCode=0 Dec 02 12:19:17 crc kubenswrapper[4721]: I1202 12:19:17.222298 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-h5jq9/must-gather-r2trd" event={"ID":"e48d8132-fb2f-4162-9297-95a972e985e0","Type":"ContainerDied","Data":"4476479b8adaae5bb05c5aacd3168c85dec2ffdd68d8f3e2af6574fbb24c8c24"} Dec 02 12:19:17 crc kubenswrapper[4721]: I1202 12:19:17.223026 4721 scope.go:117] "RemoveContainer" containerID="4476479b8adaae5bb05c5aacd3168c85dec2ffdd68d8f3e2af6574fbb24c8c24" Dec 02 12:19:17 crc kubenswrapper[4721]: I1202 12:19:17.859189 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-h5jq9_must-gather-r2trd_e48d8132-fb2f-4162-9297-95a972e985e0/gather/0.log" Dec 02 12:19:18 crc kubenswrapper[4721]: I1202 12:19:18.840240 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:19:18 crc kubenswrapper[4721]: E1202 12:19:18.841383 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:19:24 crc kubenswrapper[4721]: I1202 12:19:24.746146 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-h5jq9/must-gather-r2trd"] Dec 02 12:19:24 crc kubenswrapper[4721]: I1202 12:19:24.746783 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-h5jq9/must-gather-r2trd" podUID="e48d8132-fb2f-4162-9297-95a972e985e0" containerName="copy" containerID="cri-o://d5e7e53f171e4ef3bbd081d80daf18bfa10240ef7cdedfb9a1fa8c69f9c37994" gracePeriod=2 Dec 02 12:19:24 crc kubenswrapper[4721]: I1202 12:19:24.752325 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-h5jq9/must-gather-r2trd"] Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.091088 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-h5jq9_must-gather-r2trd_e48d8132-fb2f-4162-9297-95a972e985e0/copy/0.log" Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.091614 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-h5jq9/must-gather-r2trd" Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.279090 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e48d8132-fb2f-4162-9297-95a972e985e0-must-gather-output\") pod \"e48d8132-fb2f-4162-9297-95a972e985e0\" (UID: \"e48d8132-fb2f-4162-9297-95a972e985e0\") " Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.279323 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fgsf\" (UniqueName: \"kubernetes.io/projected/e48d8132-fb2f-4162-9297-95a972e985e0-kube-api-access-5fgsf\") pod \"e48d8132-fb2f-4162-9297-95a972e985e0\" (UID: \"e48d8132-fb2f-4162-9297-95a972e985e0\") " Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.281710 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-h5jq9_must-gather-r2trd_e48d8132-fb2f-4162-9297-95a972e985e0/copy/0.log" Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.282558 4721 generic.go:334] "Generic (PLEG): container finished" podID="e48d8132-fb2f-4162-9297-95a972e985e0" containerID="d5e7e53f171e4ef3bbd081d80daf18bfa10240ef7cdedfb9a1fa8c69f9c37994" exitCode=143 Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.282764 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-h5jq9/must-gather-r2trd" Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.283373 4721 scope.go:117] "RemoveContainer" containerID="d5e7e53f171e4ef3bbd081d80daf18bfa10240ef7cdedfb9a1fa8c69f9c37994" Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.286184 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e48d8132-fb2f-4162-9297-95a972e985e0-kube-api-access-5fgsf" (OuterVolumeSpecName: "kube-api-access-5fgsf") pod "e48d8132-fb2f-4162-9297-95a972e985e0" (UID: "e48d8132-fb2f-4162-9297-95a972e985e0"). InnerVolumeSpecName "kube-api-access-5fgsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.316299 4721 scope.go:117] "RemoveContainer" containerID="4476479b8adaae5bb05c5aacd3168c85dec2ffdd68d8f3e2af6574fbb24c8c24" Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.355363 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e48d8132-fb2f-4162-9297-95a972e985e0-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "e48d8132-fb2f-4162-9297-95a972e985e0" (UID: "e48d8132-fb2f-4162-9297-95a972e985e0"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.365813 4721 scope.go:117] "RemoveContainer" containerID="d5e7e53f171e4ef3bbd081d80daf18bfa10240ef7cdedfb9a1fa8c69f9c37994" Dec 02 12:19:25 crc kubenswrapper[4721]: E1202 12:19:25.366252 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5e7e53f171e4ef3bbd081d80daf18bfa10240ef7cdedfb9a1fa8c69f9c37994\": container with ID starting with d5e7e53f171e4ef3bbd081d80daf18bfa10240ef7cdedfb9a1fa8c69f9c37994 not found: ID does not exist" containerID="d5e7e53f171e4ef3bbd081d80daf18bfa10240ef7cdedfb9a1fa8c69f9c37994" Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.366291 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5e7e53f171e4ef3bbd081d80daf18bfa10240ef7cdedfb9a1fa8c69f9c37994"} err="failed to get container status \"d5e7e53f171e4ef3bbd081d80daf18bfa10240ef7cdedfb9a1fa8c69f9c37994\": rpc error: code = NotFound desc = could not find container \"d5e7e53f171e4ef3bbd081d80daf18bfa10240ef7cdedfb9a1fa8c69f9c37994\": container with ID starting with d5e7e53f171e4ef3bbd081d80daf18bfa10240ef7cdedfb9a1fa8c69f9c37994 not found: ID does not exist" Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.366328 4721 scope.go:117] "RemoveContainer" containerID="4476479b8adaae5bb05c5aacd3168c85dec2ffdd68d8f3e2af6574fbb24c8c24" Dec 02 12:19:25 crc kubenswrapper[4721]: E1202 12:19:25.367922 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4476479b8adaae5bb05c5aacd3168c85dec2ffdd68d8f3e2af6574fbb24c8c24\": container with ID starting with 4476479b8adaae5bb05c5aacd3168c85dec2ffdd68d8f3e2af6574fbb24c8c24 not found: ID does not exist" containerID="4476479b8adaae5bb05c5aacd3168c85dec2ffdd68d8f3e2af6574fbb24c8c24" Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.367954 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4476479b8adaae5bb05c5aacd3168c85dec2ffdd68d8f3e2af6574fbb24c8c24"} err="failed to get container status \"4476479b8adaae5bb05c5aacd3168c85dec2ffdd68d8f3e2af6574fbb24c8c24\": rpc error: code = NotFound desc = could not find container \"4476479b8adaae5bb05c5aacd3168c85dec2ffdd68d8f3e2af6574fbb24c8c24\": container with ID starting with 4476479b8adaae5bb05c5aacd3168c85dec2ffdd68d8f3e2af6574fbb24c8c24 not found: ID does not exist" Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.382215 4721 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e48d8132-fb2f-4162-9297-95a972e985e0-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 02 12:19:25 crc kubenswrapper[4721]: I1202 12:19:25.382238 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fgsf\" (UniqueName: \"kubernetes.io/projected/e48d8132-fb2f-4162-9297-95a972e985e0-kube-api-access-5fgsf\") on node \"crc\" DevicePath \"\"" Dec 02 12:19:26 crc kubenswrapper[4721]: I1202 12:19:26.847571 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e48d8132-fb2f-4162-9297-95a972e985e0" path="/var/lib/kubelet/pods/e48d8132-fb2f-4162-9297-95a972e985e0/volumes" Dec 02 12:19:32 crc kubenswrapper[4721]: I1202 12:19:32.839929 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:19:32 crc 
kubenswrapper[4721]: E1202 12:19:32.840694 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:19:44 crc kubenswrapper[4721]: I1202 12:19:44.840041 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:19:44 crc kubenswrapper[4721]: E1202 12:19:44.840683 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:19:59 crc kubenswrapper[4721]: I1202 12:19:59.839924 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:19:59 crc kubenswrapper[4721]: E1202 12:19:59.840814 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:20:13 crc kubenswrapper[4721]: I1202 12:20:13.839608 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:20:13 crc kubenswrapper[4721]: E1202 12:20:13.840229 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:20:14 crc kubenswrapper[4721]: I1202 12:20:14.895331 4721 scope.go:117] "RemoveContainer" containerID="1c7c9162b575caa76db5e414cb7b4015f067355fff84caa7f8ac197367c94e01" Dec 02 12:20:14 crc kubenswrapper[4721]: I1202 12:20:14.921263 4721 scope.go:117] "RemoveContainer" containerID="479d0dec2aaa6fd4ccce84dbbf0d0e58053b85167261d3f59757a121591636bc" Dec 02 12:20:14 crc kubenswrapper[4721]: I1202 12:20:14.954903 4721 scope.go:117] "RemoveContainer" containerID="39c5390ffcbe3c0be784db7ef05672025fcf3d952d626304e297e1590718a897" Dec 02 12:20:14 crc kubenswrapper[4721]: I1202 12:20:14.970759 4721 scope.go:117] "RemoveContainer" containerID="d1d795e05b76189fa88a0000068bea65d1418dc6a05ab971bb5f8f433e613460" Dec 02 12:20:14 crc kubenswrapper[4721]: I1202 12:20:14.987659 4721 scope.go:117] "RemoveContainer" containerID="37cc4ccb7d8d4a2ff89c5372f3428d8517c9cf30a414c165ecbb4c6f1b5fa94c" Dec 02 12:20:15 crc kubenswrapper[4721]: I1202 12:20:15.010497 4721 scope.go:117] "RemoveContainer" 
containerID="80a25188c3a297462774a26f2f40dcb50a669d76b2758a866aa8379d3f1d83f2" Dec 02 12:20:15 crc kubenswrapper[4721]: I1202 12:20:15.032159 4721 scope.go:117] "RemoveContainer" containerID="a09e88795ce2afc38328685e05ceb0f9eb725d61c3b98331e5658b0322ce0e3a" Dec 02 12:20:15 crc kubenswrapper[4721]: I1202 12:20:15.048994 4721 scope.go:117] "RemoveContainer" containerID="8075ed09bcab06c0a231c1cabf3e84c96324fed88d1662cf29bb85487628bcf7" Dec 02 12:20:15 crc kubenswrapper[4721]: I1202 12:20:15.066118 4721 scope.go:117] "RemoveContainer" containerID="ca8d8914238f7696f9bae1ac633efc7bd5b63e29dac383a6626db4cf4f83dcb2" Dec 02 12:20:28 crc kubenswrapper[4721]: I1202 12:20:28.840424 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:20:28 crc kubenswrapper[4721]: E1202 12:20:28.842505 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:20:39 crc kubenswrapper[4721]: I1202 12:20:39.839120 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:20:39 crc kubenswrapper[4721]: E1202 12:20:39.839827 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:20:52 crc kubenswrapper[4721]: I1202 12:20:52.839574 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:20:52 crc kubenswrapper[4721]: E1202 12:20:52.840335 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:21:05 crc kubenswrapper[4721]: I1202 12:21:05.839804 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:21:05 crc kubenswrapper[4721]: E1202 12:21:05.841112 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:21:15 crc kubenswrapper[4721]: I1202 12:21:15.130783 4721 scope.go:117] "RemoveContainer" containerID="20751d56bae21081ffb6f6e3c04b3159a9de2e729362ea5a49ab32939dbedc2c" Dec 02 12:21:19 crc kubenswrapper[4721]: I1202 12:21:19.839618 4721 
scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:21:19 crc kubenswrapper[4721]: E1202 12:21:19.841738 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:21:30 crc kubenswrapper[4721]: I1202 12:21:30.841682 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:21:30 crc kubenswrapper[4721]: E1202 12:21:30.842637 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:21:41 crc kubenswrapper[4721]: I1202 12:21:41.839359 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:21:41 crc kubenswrapper[4721]: E1202 12:21:41.840133 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.385991 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8xjkm"] Dec 02 12:21:46 crc kubenswrapper[4721]: E1202 12:21:46.387276 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e48d8132-fb2f-4162-9297-95a972e985e0" containerName="copy" Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.387290 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e48d8132-fb2f-4162-9297-95a972e985e0" containerName="copy" Dec 02 12:21:46 crc kubenswrapper[4721]: E1202 12:21:46.387306 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e48d8132-fb2f-4162-9297-95a972e985e0" containerName="gather" Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.387313 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e48d8132-fb2f-4162-9297-95a972e985e0" containerName="gather" Dec 02 12:21:46 crc kubenswrapper[4721]: E1202 12:21:46.387324 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7349d6cc-58af-4d67-a865-31d8409c29cd" containerName="mariadb-account-delete" Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.387330 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7349d6cc-58af-4d67-a865-31d8409c29cd" containerName="mariadb-account-delete" Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.387452 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7349d6cc-58af-4d67-a865-31d8409c29cd" containerName="mariadb-account-delete" Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 
Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.387463 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e48d8132-fb2f-4162-9297-95a972e985e0" containerName="gather"
Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.387474 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e48d8132-fb2f-4162-9297-95a972e985e0" containerName="copy"
Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.388384 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8xjkm"
Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.400122 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8xjkm"]
Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.439846 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38756085-9e66-4558-98e3-f30d53668e71-catalog-content\") pod \"certified-operators-8xjkm\" (UID: \"38756085-9e66-4558-98e3-f30d53668e71\") " pod="openshift-marketplace/certified-operators-8xjkm"
Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.439900 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68fjk\" (UniqueName: \"kubernetes.io/projected/38756085-9e66-4558-98e3-f30d53668e71-kube-api-access-68fjk\") pod \"certified-operators-8xjkm\" (UID: \"38756085-9e66-4558-98e3-f30d53668e71\") " pod="openshift-marketplace/certified-operators-8xjkm"
Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.439922 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38756085-9e66-4558-98e3-f30d53668e71-utilities\") pod \"certified-operators-8xjkm\" (UID: \"38756085-9e66-4558-98e3-f30d53668e71\") " pod="openshift-marketplace/certified-operators-8xjkm"
Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.540744 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38756085-9e66-4558-98e3-f30d53668e71-catalog-content\") pod \"certified-operators-8xjkm\" (UID: \"38756085-9e66-4558-98e3-f30d53668e71\") " pod="openshift-marketplace/certified-operators-8xjkm"
Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.540806 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68fjk\" (UniqueName: \"kubernetes.io/projected/38756085-9e66-4558-98e3-f30d53668e71-kube-api-access-68fjk\") pod \"certified-operators-8xjkm\" (UID: \"38756085-9e66-4558-98e3-f30d53668e71\") " pod="openshift-marketplace/certified-operators-8xjkm"
Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.540833 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38756085-9e66-4558-98e3-f30d53668e71-utilities\") pod \"certified-operators-8xjkm\" (UID: \"38756085-9e66-4558-98e3-f30d53668e71\") " pod="openshift-marketplace/certified-operators-8xjkm"
Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.541323 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38756085-9e66-4558-98e3-f30d53668e71-utilities\") pod \"certified-operators-8xjkm\" (UID: \"38756085-9e66-4558-98e3-f30d53668e71\") " pod="openshift-marketplace/certified-operators-8xjkm"
Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.541577 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38756085-9e66-4558-98e3-f30d53668e71-catalog-content\") pod \"certified-operators-8xjkm\" (UID: \"38756085-9e66-4558-98e3-f30d53668e71\") " pod="openshift-marketplace/certified-operators-8xjkm"
Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.573032 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68fjk\" (UniqueName: \"kubernetes.io/projected/38756085-9e66-4558-98e3-f30d53668e71-kube-api-access-68fjk\") pod \"certified-operators-8xjkm\" (UID: \"38756085-9e66-4558-98e3-f30d53668e71\") " pod="openshift-marketplace/certified-operators-8xjkm"
Dec 02 12:21:46 crc kubenswrapper[4721]: I1202 12:21:46.706701 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8xjkm"
Dec 02 12:21:47 crc kubenswrapper[4721]: I1202 12:21:47.171415 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8xjkm"]
Dec 02 12:21:47 crc kubenswrapper[4721]: I1202 12:21:47.734923 4721 generic.go:334] "Generic (PLEG): container finished" podID="38756085-9e66-4558-98e3-f30d53668e71" containerID="575b13ce596d179519a6edfbcdb7bc2bc96d1d225aa04daa2119afb08acc755e" exitCode=0
Dec 02 12:21:47 crc kubenswrapper[4721]: I1202 12:21:47.734982 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8xjkm" event={"ID":"38756085-9e66-4558-98e3-f30d53668e71","Type":"ContainerDied","Data":"575b13ce596d179519a6edfbcdb7bc2bc96d1d225aa04daa2119afb08acc755e"}
Dec 02 12:21:47 crc kubenswrapper[4721]: I1202 12:21:47.735265 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8xjkm" event={"ID":"38756085-9e66-4558-98e3-f30d53668e71","Type":"ContainerStarted","Data":"4430f54ef73508a6c5c1b8aef1b6509111eda4bceffbae51decb4c526810c52d"}
Dec 02 12:21:47 crc kubenswrapper[4721]: I1202 12:21:47.736899 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 02 12:21:48 crc kubenswrapper[4721]: I1202 12:21:48.182220 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-76v9x"]
Need to start a new one" pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:21:48 crc kubenswrapper[4721]: I1202 12:21:48.192026 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-76v9x"] Dec 02 12:21:48 crc kubenswrapper[4721]: I1202 12:21:48.274726 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47vdf\" (UniqueName: \"kubernetes.io/projected/08910744-72ad-4df3-87bb-8f1e04ffdd00-kube-api-access-47vdf\") pod \"community-operators-76v9x\" (UID: \"08910744-72ad-4df3-87bb-8f1e04ffdd00\") " pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:21:48 crc kubenswrapper[4721]: I1202 12:21:48.274944 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08910744-72ad-4df3-87bb-8f1e04ffdd00-catalog-content\") pod \"community-operators-76v9x\" (UID: \"08910744-72ad-4df3-87bb-8f1e04ffdd00\") " pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:21:48 crc kubenswrapper[4721]: I1202 12:21:48.275056 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08910744-72ad-4df3-87bb-8f1e04ffdd00-utilities\") pod \"community-operators-76v9x\" (UID: \"08910744-72ad-4df3-87bb-8f1e04ffdd00\") " pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:21:48 crc kubenswrapper[4721]: I1202 12:21:48.376602 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08910744-72ad-4df3-87bb-8f1e04ffdd00-catalog-content\") pod \"community-operators-76v9x\" (UID: \"08910744-72ad-4df3-87bb-8f1e04ffdd00\") " pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:21:48 crc kubenswrapper[4721]: I1202 12:21:48.377071 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08910744-72ad-4df3-87bb-8f1e04ffdd00-utilities\") pod \"community-operators-76v9x\" (UID: \"08910744-72ad-4df3-87bb-8f1e04ffdd00\") " pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:21:48 crc kubenswrapper[4721]: I1202 12:21:48.377219 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47vdf\" (UniqueName: \"kubernetes.io/projected/08910744-72ad-4df3-87bb-8f1e04ffdd00-kube-api-access-47vdf\") pod \"community-operators-76v9x\" (UID: \"08910744-72ad-4df3-87bb-8f1e04ffdd00\") " pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:21:48 crc kubenswrapper[4721]: I1202 12:21:48.377317 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08910744-72ad-4df3-87bb-8f1e04ffdd00-catalog-content\") pod \"community-operators-76v9x\" (UID: \"08910744-72ad-4df3-87bb-8f1e04ffdd00\") " pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:21:48 crc kubenswrapper[4721]: I1202 12:21:48.377590 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08910744-72ad-4df3-87bb-8f1e04ffdd00-utilities\") pod \"community-operators-76v9x\" (UID: \"08910744-72ad-4df3-87bb-8f1e04ffdd00\") " pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:21:48 crc kubenswrapper[4721]: I1202 12:21:48.404161 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-47vdf\" (UniqueName: \"kubernetes.io/projected/08910744-72ad-4df3-87bb-8f1e04ffdd00-kube-api-access-47vdf\") pod \"community-operators-76v9x\" (UID: \"08910744-72ad-4df3-87bb-8f1e04ffdd00\") " pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:21:48 crc kubenswrapper[4721]: I1202 12:21:48.513920 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:21:48 crc kubenswrapper[4721]: I1202 12:21:48.753128 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8xjkm" event={"ID":"38756085-9e66-4558-98e3-f30d53668e71","Type":"ContainerStarted","Data":"a8ded81a1023733a10cf158b9e1a0f408b3c01e9fe006014c8c86c4e7a455a15"} Dec 02 12:21:48 crc kubenswrapper[4721]: I1202 12:21:48.802617 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-76v9x"] Dec 02 12:21:48 crc kubenswrapper[4721]: W1202 12:21:48.806642 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08910744_72ad_4df3_87bb_8f1e04ffdd00.slice/crio-0a7f550715b9cfa827f2a5807a0a1e7d7d2b588ee79eddb1cc2efdc07fe70c48 WatchSource:0}: Error finding container 0a7f550715b9cfa827f2a5807a0a1e7d7d2b588ee79eddb1cc2efdc07fe70c48: Status 404 returned error can't find the container with id 0a7f550715b9cfa827f2a5807a0a1e7d7d2b588ee79eddb1cc2efdc07fe70c48 Dec 02 12:21:49 crc kubenswrapper[4721]: I1202 12:21:49.762470 4721 generic.go:334] "Generic (PLEG): container finished" podID="38756085-9e66-4558-98e3-f30d53668e71" containerID="a8ded81a1023733a10cf158b9e1a0f408b3c01e9fe006014c8c86c4e7a455a15" exitCode=0 Dec 02 12:21:49 crc kubenswrapper[4721]: I1202 12:21:49.762556 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8xjkm" event={"ID":"38756085-9e66-4558-98e3-f30d53668e71","Type":"ContainerDied","Data":"a8ded81a1023733a10cf158b9e1a0f408b3c01e9fe006014c8c86c4e7a455a15"} Dec 02 12:21:49 crc kubenswrapper[4721]: I1202 12:21:49.764252 4721 generic.go:334] "Generic (PLEG): container finished" podID="08910744-72ad-4df3-87bb-8f1e04ffdd00" containerID="6d882f2d4e945b5ba0f103e7379a0615356ad8508ea3f1711d6090508ed4c1c1" exitCode=0 Dec 02 12:21:49 crc kubenswrapper[4721]: I1202 12:21:49.764400 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-76v9x" event={"ID":"08910744-72ad-4df3-87bb-8f1e04ffdd00","Type":"ContainerDied","Data":"6d882f2d4e945b5ba0f103e7379a0615356ad8508ea3f1711d6090508ed4c1c1"} Dec 02 12:21:49 crc kubenswrapper[4721]: I1202 12:21:49.764569 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-76v9x" event={"ID":"08910744-72ad-4df3-87bb-8f1e04ffdd00","Type":"ContainerStarted","Data":"0a7f550715b9cfa827f2a5807a0a1e7d7d2b588ee79eddb1cc2efdc07fe70c48"} Dec 02 12:21:50 crc kubenswrapper[4721]: I1202 12:21:50.783094 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8xjkm" event={"ID":"38756085-9e66-4558-98e3-f30d53668e71","Type":"ContainerStarted","Data":"5def17a91374044a3a6f9e6f57885466bec8744897ee5cf2c7b092031faa8ce6"} Dec 02 12:21:50 crc kubenswrapper[4721]: I1202 12:21:50.807599 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8xjkm" 
Dec 02 12:21:50 crc kubenswrapper[4721]: I1202 12:21:50.807599 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8xjkm" podStartSLOduration=2.389685714 podStartE2EDuration="4.807569443s" podCreationTimestamp="2025-12-02 12:21:46 +0000 UTC" firstStartedPulling="2025-12-02 12:21:47.736665984 +0000 UTC m=+1837.475216333" lastFinishedPulling="2025-12-02 12:21:50.154549713 +0000 UTC m=+1839.893100062" observedRunningTime="2025-12-02 12:21:50.802037742 +0000 UTC m=+1840.540588131" watchObservedRunningTime="2025-12-02 12:21:50.807569443 +0000 UTC m=+1840.546119822"
Dec 02 12:21:51 crc kubenswrapper[4721]: I1202 12:21:51.790992 4721 generic.go:334] "Generic (PLEG): container finished" podID="08910744-72ad-4df3-87bb-8f1e04ffdd00" containerID="aed9ebeb97cbb061b16de099bc0f26cacf112c8d37298d997584acccbea02bb5" exitCode=0
Dec 02 12:21:51 crc kubenswrapper[4721]: I1202 12:21:51.791064 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-76v9x" event={"ID":"08910744-72ad-4df3-87bb-8f1e04ffdd00","Type":"ContainerDied","Data":"aed9ebeb97cbb061b16de099bc0f26cacf112c8d37298d997584acccbea02bb5"}
Dec 02 12:21:52 crc kubenswrapper[4721]: I1202 12:21:52.802813 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-76v9x" event={"ID":"08910744-72ad-4df3-87bb-8f1e04ffdd00","Type":"ContainerStarted","Data":"8b7100c105f136d31ab7b97d88591aa27a4401682ea8414978965b87c555e5a3"}
Dec 02 12:21:52 crc kubenswrapper[4721]: I1202 12:21:52.825232 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-76v9x" podStartSLOduration=2.315903265 podStartE2EDuration="4.82521458s" podCreationTimestamp="2025-12-02 12:21:48 +0000 UTC" firstStartedPulling="2025-12-02 12:21:49.766161666 +0000 UTC m=+1839.504712015" lastFinishedPulling="2025-12-02 12:21:52.275472981 +0000 UTC m=+1842.014023330" observedRunningTime="2025-12-02 12:21:52.8219852 +0000 UTC m=+1842.560535549" watchObservedRunningTime="2025-12-02 12:21:52.82521458 +0000 UTC m=+1842.563764939"
Dec 02 12:21:55 crc kubenswrapper[4721]: I1202 12:21:55.839685 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0"
Dec 02 12:21:55 crc kubenswrapper[4721]: E1202 12:21:55.840135 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4"
Dec 02 12:21:56 crc kubenswrapper[4721]: I1202 12:21:56.707678 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8xjkm"
Dec 02 12:21:56 crc kubenswrapper[4721]: I1202 12:21:56.707934 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8xjkm"
Dec 02 12:21:56 crc kubenswrapper[4721]: I1202 12:21:56.741509 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8xjkm"
Dec 02 12:21:56 crc kubenswrapper[4721]: I1202 12:21:56.857779 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8xjkm"
pods=["openshift-marketplace/certified-operators-8xjkm"] Dec 02 12:21:58 crc kubenswrapper[4721]: I1202 12:21:58.514322 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:21:58 crc kubenswrapper[4721]: I1202 12:21:58.514403 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:21:58 crc kubenswrapper[4721]: I1202 12:21:58.559193 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:21:58 crc kubenswrapper[4721]: I1202 12:21:58.846519 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8xjkm" podUID="38756085-9e66-4558-98e3-f30d53668e71" containerName="registry-server" containerID="cri-o://5def17a91374044a3a6f9e6f57885466bec8744897ee5cf2c7b092031faa8ce6" gracePeriod=2 Dec 02 12:21:58 crc kubenswrapper[4721]: I1202 12:21:58.895721 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.192211 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8xjkm" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.203893 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68fjk\" (UniqueName: \"kubernetes.io/projected/38756085-9e66-4558-98e3-f30d53668e71-kube-api-access-68fjk\") pod \"38756085-9e66-4558-98e3-f30d53668e71\" (UID: \"38756085-9e66-4558-98e3-f30d53668e71\") " Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.203952 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38756085-9e66-4558-98e3-f30d53668e71-catalog-content\") pod \"38756085-9e66-4558-98e3-f30d53668e71\" (UID: \"38756085-9e66-4558-98e3-f30d53668e71\") " Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.203980 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38756085-9e66-4558-98e3-f30d53668e71-utilities\") pod \"38756085-9e66-4558-98e3-f30d53668e71\" (UID: \"38756085-9e66-4558-98e3-f30d53668e71\") " Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.204921 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38756085-9e66-4558-98e3-f30d53668e71-utilities" (OuterVolumeSpecName: "utilities") pod "38756085-9e66-4558-98e3-f30d53668e71" (UID: "38756085-9e66-4558-98e3-f30d53668e71"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.222930 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38756085-9e66-4558-98e3-f30d53668e71-kube-api-access-68fjk" (OuterVolumeSpecName: "kube-api-access-68fjk") pod "38756085-9e66-4558-98e3-f30d53668e71" (UID: "38756085-9e66-4558-98e3-f30d53668e71"). InnerVolumeSpecName "kube-api-access-68fjk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.305283 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68fjk\" (UniqueName: \"kubernetes.io/projected/38756085-9e66-4558-98e3-f30d53668e71-kube-api-access-68fjk\") on node \"crc\" DevicePath \"\"" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.305334 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38756085-9e66-4558-98e3-f30d53668e71-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.327878 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38756085-9e66-4558-98e3-f30d53668e71-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "38756085-9e66-4558-98e3-f30d53668e71" (UID: "38756085-9e66-4558-98e3-f30d53668e71"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.405942 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38756085-9e66-4558-98e3-f30d53668e71-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.855264 4721 generic.go:334] "Generic (PLEG): container finished" podID="38756085-9e66-4558-98e3-f30d53668e71" containerID="5def17a91374044a3a6f9e6f57885466bec8744897ee5cf2c7b092031faa8ce6" exitCode=0 Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.855307 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8xjkm" event={"ID":"38756085-9e66-4558-98e3-f30d53668e71","Type":"ContainerDied","Data":"5def17a91374044a3a6f9e6f57885466bec8744897ee5cf2c7b092031faa8ce6"} Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.855360 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8xjkm" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.855395 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8xjkm" event={"ID":"38756085-9e66-4558-98e3-f30d53668e71","Type":"ContainerDied","Data":"4430f54ef73508a6c5c1b8aef1b6509111eda4bceffbae51decb4c526810c52d"} Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.855421 4721 scope.go:117] "RemoveContainer" containerID="5def17a91374044a3a6f9e6f57885466bec8744897ee5cf2c7b092031faa8ce6" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.883542 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8xjkm"] Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.883699 4721 scope.go:117] "RemoveContainer" containerID="a8ded81a1023733a10cf158b9e1a0f408b3c01e9fe006014c8c86c4e7a455a15" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.889400 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8xjkm"] Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.910053 4721 scope.go:117] "RemoveContainer" containerID="575b13ce596d179519a6edfbcdb7bc2bc96d1d225aa04daa2119afb08acc755e" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.925782 4721 scope.go:117] "RemoveContainer" containerID="5def17a91374044a3a6f9e6f57885466bec8744897ee5cf2c7b092031faa8ce6" Dec 02 12:21:59 crc kubenswrapper[4721]: E1202 12:21:59.926632 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5def17a91374044a3a6f9e6f57885466bec8744897ee5cf2c7b092031faa8ce6\": container with ID starting with 5def17a91374044a3a6f9e6f57885466bec8744897ee5cf2c7b092031faa8ce6 not found: ID does not exist" containerID="5def17a91374044a3a6f9e6f57885466bec8744897ee5cf2c7b092031faa8ce6" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.926666 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5def17a91374044a3a6f9e6f57885466bec8744897ee5cf2c7b092031faa8ce6"} err="failed to get container status \"5def17a91374044a3a6f9e6f57885466bec8744897ee5cf2c7b092031faa8ce6\": rpc error: code = NotFound desc = could not find container \"5def17a91374044a3a6f9e6f57885466bec8744897ee5cf2c7b092031faa8ce6\": container with ID starting with 5def17a91374044a3a6f9e6f57885466bec8744897ee5cf2c7b092031faa8ce6 not found: ID does not exist" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.926687 4721 scope.go:117] "RemoveContainer" containerID="a8ded81a1023733a10cf158b9e1a0f408b3c01e9fe006014c8c86c4e7a455a15" Dec 02 12:21:59 crc kubenswrapper[4721]: E1202 12:21:59.927152 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8ded81a1023733a10cf158b9e1a0f408b3c01e9fe006014c8c86c4e7a455a15\": container with ID starting with a8ded81a1023733a10cf158b9e1a0f408b3c01e9fe006014c8c86c4e7a455a15 not found: ID does not exist" containerID="a8ded81a1023733a10cf158b9e1a0f408b3c01e9fe006014c8c86c4e7a455a15" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.927179 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8ded81a1023733a10cf158b9e1a0f408b3c01e9fe006014c8c86c4e7a455a15"} err="failed to get container status \"a8ded81a1023733a10cf158b9e1a0f408b3c01e9fe006014c8c86c4e7a455a15\": rpc error: code = NotFound desc = could not find 
container \"a8ded81a1023733a10cf158b9e1a0f408b3c01e9fe006014c8c86c4e7a455a15\": container with ID starting with a8ded81a1023733a10cf158b9e1a0f408b3c01e9fe006014c8c86c4e7a455a15 not found: ID does not exist" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.927196 4721 scope.go:117] "RemoveContainer" containerID="575b13ce596d179519a6edfbcdb7bc2bc96d1d225aa04daa2119afb08acc755e" Dec 02 12:21:59 crc kubenswrapper[4721]: E1202 12:21:59.927479 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"575b13ce596d179519a6edfbcdb7bc2bc96d1d225aa04daa2119afb08acc755e\": container with ID starting with 575b13ce596d179519a6edfbcdb7bc2bc96d1d225aa04daa2119afb08acc755e not found: ID does not exist" containerID="575b13ce596d179519a6edfbcdb7bc2bc96d1d225aa04daa2119afb08acc755e" Dec 02 12:21:59 crc kubenswrapper[4721]: I1202 12:21:59.927502 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"575b13ce596d179519a6edfbcdb7bc2bc96d1d225aa04daa2119afb08acc755e"} err="failed to get container status \"575b13ce596d179519a6edfbcdb7bc2bc96d1d225aa04daa2119afb08acc755e\": rpc error: code = NotFound desc = could not find container \"575b13ce596d179519a6edfbcdb7bc2bc96d1d225aa04daa2119afb08acc755e\": container with ID starting with 575b13ce596d179519a6edfbcdb7bc2bc96d1d225aa04daa2119afb08acc755e not found: ID does not exist" Dec 02 12:22:00 crc kubenswrapper[4721]: I1202 12:22:00.567062 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-76v9x"] Dec 02 12:22:00 crc kubenswrapper[4721]: I1202 12:22:00.846086 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38756085-9e66-4558-98e3-f30d53668e71" path="/var/lib/kubelet/pods/38756085-9e66-4558-98e3-f30d53668e71/volumes" Dec 02 12:22:00 crc kubenswrapper[4721]: I1202 12:22:00.862751 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-76v9x" podUID="08910744-72ad-4df3-87bb-8f1e04ffdd00" containerName="registry-server" containerID="cri-o://8b7100c105f136d31ab7b97d88591aa27a4401682ea8414978965b87c555e5a3" gracePeriod=2 Dec 02 12:22:01 crc kubenswrapper[4721]: I1202 12:22:01.871012 4721 generic.go:334] "Generic (PLEG): container finished" podID="08910744-72ad-4df3-87bb-8f1e04ffdd00" containerID="8b7100c105f136d31ab7b97d88591aa27a4401682ea8414978965b87c555e5a3" exitCode=0 Dec 02 12:22:01 crc kubenswrapper[4721]: I1202 12:22:01.871050 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-76v9x" event={"ID":"08910744-72ad-4df3-87bb-8f1e04ffdd00","Type":"ContainerDied","Data":"8b7100c105f136d31ab7b97d88591aa27a4401682ea8414978965b87c555e5a3"} Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.008739 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.035219 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08910744-72ad-4df3-87bb-8f1e04ffdd00-utilities\") pod \"08910744-72ad-4df3-87bb-8f1e04ffdd00\" (UID: \"08910744-72ad-4df3-87bb-8f1e04ffdd00\") " Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.035381 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08910744-72ad-4df3-87bb-8f1e04ffdd00-catalog-content\") pod \"08910744-72ad-4df3-87bb-8f1e04ffdd00\" (UID: \"08910744-72ad-4df3-87bb-8f1e04ffdd00\") " Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.035412 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47vdf\" (UniqueName: \"kubernetes.io/projected/08910744-72ad-4df3-87bb-8f1e04ffdd00-kube-api-access-47vdf\") pod \"08910744-72ad-4df3-87bb-8f1e04ffdd00\" (UID: \"08910744-72ad-4df3-87bb-8f1e04ffdd00\") " Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.036130 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08910744-72ad-4df3-87bb-8f1e04ffdd00-utilities" (OuterVolumeSpecName: "utilities") pod "08910744-72ad-4df3-87bb-8f1e04ffdd00" (UID: "08910744-72ad-4df3-87bb-8f1e04ffdd00"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.048272 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08910744-72ad-4df3-87bb-8f1e04ffdd00-kube-api-access-47vdf" (OuterVolumeSpecName: "kube-api-access-47vdf") pod "08910744-72ad-4df3-87bb-8f1e04ffdd00" (UID: "08910744-72ad-4df3-87bb-8f1e04ffdd00"). InnerVolumeSpecName "kube-api-access-47vdf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.103843 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08910744-72ad-4df3-87bb-8f1e04ffdd00-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08910744-72ad-4df3-87bb-8f1e04ffdd00" (UID: "08910744-72ad-4df3-87bb-8f1e04ffdd00"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.136689 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08910744-72ad-4df3-87bb-8f1e04ffdd00-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.136722 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47vdf\" (UniqueName: \"kubernetes.io/projected/08910744-72ad-4df3-87bb-8f1e04ffdd00-kube-api-access-47vdf\") on node \"crc\" DevicePath \"\"" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.136747 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08910744-72ad-4df3-87bb-8f1e04ffdd00-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.338167 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-btgcl/must-gather-97dx5"] Dec 02 12:22:02 crc kubenswrapper[4721]: E1202 12:22:02.338801 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08910744-72ad-4df3-87bb-8f1e04ffdd00" containerName="extract-content" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.338813 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="08910744-72ad-4df3-87bb-8f1e04ffdd00" containerName="extract-content" Dec 02 12:22:02 crc kubenswrapper[4721]: E1202 12:22:02.338826 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38756085-9e66-4558-98e3-f30d53668e71" containerName="registry-server" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.338832 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="38756085-9e66-4558-98e3-f30d53668e71" containerName="registry-server" Dec 02 12:22:02 crc kubenswrapper[4721]: E1202 12:22:02.338839 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08910744-72ad-4df3-87bb-8f1e04ffdd00" containerName="extract-utilities" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.338845 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="08910744-72ad-4df3-87bb-8f1e04ffdd00" containerName="extract-utilities" Dec 02 12:22:02 crc kubenswrapper[4721]: E1202 12:22:02.338860 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08910744-72ad-4df3-87bb-8f1e04ffdd00" containerName="registry-server" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.338866 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="08910744-72ad-4df3-87bb-8f1e04ffdd00" containerName="registry-server" Dec 02 12:22:02 crc kubenswrapper[4721]: E1202 12:22:02.338875 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38756085-9e66-4558-98e3-f30d53668e71" containerName="extract-content" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.338880 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="38756085-9e66-4558-98e3-f30d53668e71" containerName="extract-content" Dec 02 12:22:02 crc kubenswrapper[4721]: E1202 12:22:02.338892 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38756085-9e66-4558-98e3-f30d53668e71" containerName="extract-utilities" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.338897 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="38756085-9e66-4558-98e3-f30d53668e71" containerName="extract-utilities" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.338995 4721 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="38756085-9e66-4558-98e3-f30d53668e71" containerName="registry-server" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.339010 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="08910744-72ad-4df3-87bb-8f1e04ffdd00" containerName="registry-server" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.339581 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-btgcl/must-gather-97dx5" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.343993 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-btgcl"/"kube-root-ca.crt" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.344051 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-btgcl"/"openshift-service-ca.crt" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.344051 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-btgcl"/"default-dockercfg-pbwjh" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.350494 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-btgcl/must-gather-97dx5"] Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.438963 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfspq\" (UniqueName: \"kubernetes.io/projected/3f052330-db77-46bd-abce-2a6413a6da3b-kube-api-access-lfspq\") pod \"must-gather-97dx5\" (UID: \"3f052330-db77-46bd-abce-2a6413a6da3b\") " pod="openshift-must-gather-btgcl/must-gather-97dx5" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.439006 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3f052330-db77-46bd-abce-2a6413a6da3b-must-gather-output\") pod \"must-gather-97dx5\" (UID: \"3f052330-db77-46bd-abce-2a6413a6da3b\") " pod="openshift-must-gather-btgcl/must-gather-97dx5" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.540237 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfspq\" (UniqueName: \"kubernetes.io/projected/3f052330-db77-46bd-abce-2a6413a6da3b-kube-api-access-lfspq\") pod \"must-gather-97dx5\" (UID: \"3f052330-db77-46bd-abce-2a6413a6da3b\") " pod="openshift-must-gather-btgcl/must-gather-97dx5" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.540285 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3f052330-db77-46bd-abce-2a6413a6da3b-must-gather-output\") pod \"must-gather-97dx5\" (UID: \"3f052330-db77-46bd-abce-2a6413a6da3b\") " pod="openshift-must-gather-btgcl/must-gather-97dx5" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.540747 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3f052330-db77-46bd-abce-2a6413a6da3b-must-gather-output\") pod \"must-gather-97dx5\" (UID: \"3f052330-db77-46bd-abce-2a6413a6da3b\") " pod="openshift-must-gather-btgcl/must-gather-97dx5" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.561444 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfspq\" (UniqueName: \"kubernetes.io/projected/3f052330-db77-46bd-abce-2a6413a6da3b-kube-api-access-lfspq\") pod \"must-gather-97dx5\" (UID: \"3f052330-db77-46bd-abce-2a6413a6da3b\") " 
pod="openshift-must-gather-btgcl/must-gather-97dx5" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.653929 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-btgcl/must-gather-97dx5" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.829005 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-btgcl/must-gather-97dx5"] Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.880843 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-btgcl/must-gather-97dx5" event={"ID":"3f052330-db77-46bd-abce-2a6413a6da3b","Type":"ContainerStarted","Data":"fcd4552bebde802b2402ad5de5e484042fa5d90f7779187ccd157448e8e27f2e"} Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.883018 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-76v9x" event={"ID":"08910744-72ad-4df3-87bb-8f1e04ffdd00","Type":"ContainerDied","Data":"0a7f550715b9cfa827f2a5807a0a1e7d7d2b588ee79eddb1cc2efdc07fe70c48"} Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.883280 4721 scope.go:117] "RemoveContainer" containerID="8b7100c105f136d31ab7b97d88591aa27a4401682ea8414978965b87c555e5a3" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.883066 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-76v9x" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.901237 4721 scope.go:117] "RemoveContainer" containerID="aed9ebeb97cbb061b16de099bc0f26cacf112c8d37298d997584acccbea02bb5" Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.905159 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-76v9x"] Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.910190 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-76v9x"] Dec 02 12:22:02 crc kubenswrapper[4721]: I1202 12:22:02.921740 4721 scope.go:117] "RemoveContainer" containerID="6d882f2d4e945b5ba0f103e7379a0615356ad8508ea3f1711d6090508ed4c1c1" Dec 02 12:22:03 crc kubenswrapper[4721]: I1202 12:22:03.889331 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-btgcl/must-gather-97dx5" event={"ID":"3f052330-db77-46bd-abce-2a6413a6da3b","Type":"ContainerStarted","Data":"e73e882972b057caf03cadc446b58aa071d61a14f6866f5655ceb1c30b4912a5"} Dec 02 12:22:03 crc kubenswrapper[4721]: I1202 12:22:03.889406 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-btgcl/must-gather-97dx5" event={"ID":"3f052330-db77-46bd-abce-2a6413a6da3b","Type":"ContainerStarted","Data":"566c7295b9700f8c7d497a5a676812741ffadfcd06613c6c76465a83ccd12f42"} Dec 02 12:22:03 crc kubenswrapper[4721]: I1202 12:22:03.906472 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-btgcl/must-gather-97dx5" podStartSLOduration=1.906458238 podStartE2EDuration="1.906458238s" podCreationTimestamp="2025-12-02 12:22:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 12:22:03.904190487 +0000 UTC m=+1853.642740856" watchObservedRunningTime="2025-12-02 12:22:03.906458238 +0000 UTC m=+1853.645008587" Dec 02 12:22:04 crc kubenswrapper[4721]: I1202 12:22:04.848061 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08910744-72ad-4df3-87bb-8f1e04ffdd00" 
path="/var/lib/kubelet/pods/08910744-72ad-4df3-87bb-8f1e04ffdd00/volumes" Dec 02 12:22:10 crc kubenswrapper[4721]: I1202 12:22:10.843704 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:22:10 crc kubenswrapper[4721]: E1202 12:22:10.844337 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:22:15 crc kubenswrapper[4721]: I1202 12:22:15.174675 4721 scope.go:117] "RemoveContainer" containerID="1cd990a7ed91654d8b437e55c168088d85662fcde7818a64ee0d169f100c70f1" Dec 02 12:22:21 crc kubenswrapper[4721]: I1202 12:22:21.839847 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:22:21 crc kubenswrapper[4721]: E1202 12:22:21.840297 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:22:35 crc kubenswrapper[4721]: I1202 12:22:34.842263 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:22:35 crc kubenswrapper[4721]: E1202 12:22:34.843212 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:22:35 crc kubenswrapper[4721]: I1202 12:22:35.415796 4721 patch_prober.go:28] interesting pod/router-default-5444994796-wtlwb container/router namespace/openshift-ingress: Liveness probe status=failure output="Get \"http://localhost:1936/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 12:22:35 crc kubenswrapper[4721]: I1202 12:22:35.415982 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-ingress/router-default-5444994796-wtlwb" podUID="a26573b3-3899-4b81-ba01-fabea24142cc" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 12:22:45 crc kubenswrapper[4721]: I1202 12:22:45.875390 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-drfrd_18b11741-408b-4270-abd3-b43fd24149ab/control-plane-machine-set-operator/0.log" Dec 02 12:22:46 crc kubenswrapper[4721]: I1202 12:22:46.025874 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zjwgh_b464a77f-4bf7-4719-98c2-d07e5bde858a/kube-rbac-proxy/0.log" Dec 02 12:22:46 crc 
Dec 02 12:22:46 crc kubenswrapper[4721]: I1202 12:22:46.046774 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zjwgh_b464a77f-4bf7-4719-98c2-d07e5bde858a/machine-api-operator/0.log"
Dec 02 12:22:46 crc kubenswrapper[4721]: I1202 12:22:46.844248 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0"
Dec 02 12:22:46 crc kubenswrapper[4721]: E1202 12:22:46.844588 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4"
Dec 02 12:22:59 crc kubenswrapper[4721]: I1202 12:22:59.613666 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-clfzp_15a0fceb-bf2e-4a05-870e-7501c772ae6c/kube-rbac-proxy/0.log"
Dec 02 12:22:59 crc kubenswrapper[4721]: I1202 12:22:59.684771 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-clfzp_15a0fceb-bf2e-4a05-870e-7501c772ae6c/controller/0.log"
Dec 02 12:22:59 crc kubenswrapper[4721]: I1202 12:22:59.783476 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-frr-files/0.log"
Dec 02 12:22:59 crc kubenswrapper[4721]: I1202 12:22:59.839593 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0"
Dec 02 12:22:59 crc kubenswrapper[4721]: E1202 12:22:59.839894 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4"
Dec 02 12:22:59 crc kubenswrapper[4721]: I1202 12:22:59.908485 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-frr-files/0.log"
Dec 02 12:22:59 crc kubenswrapper[4721]: I1202 12:22:59.948910 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-reloader/0.log"
Dec 02 12:22:59 crc kubenswrapper[4721]: I1202 12:22:59.977033 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-metrics/0.log"
Dec 02 12:22:59 crc kubenswrapper[4721]: I1202 12:22:59.987766 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-reloader/0.log"
Dec 02 12:23:00 crc kubenswrapper[4721]: I1202 12:23:00.132270 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-reloader/0.log"
Dec 02 12:23:00 crc kubenswrapper[4721]: I1202 12:23:00.150584 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-frr-files/0.log"
Dec 02 12:23:00 crc kubenswrapper[4721]: I1202 12:23:00.171770 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-metrics/0.log"
Dec 02 12:23:00 crc kubenswrapper[4721]: I1202 12:23:00.207774 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-metrics/0.log"
Dec 02 12:23:00 crc kubenswrapper[4721]: I1202 12:23:00.321891 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-metrics/0.log"
Dec 02 12:23:00 crc kubenswrapper[4721]: I1202 12:23:00.328854 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-reloader/0.log"
Dec 02 12:23:00 crc kubenswrapper[4721]: I1202 12:23:00.353408 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/cp-frr-files/0.log"
Dec 02 12:23:00 crc kubenswrapper[4721]: I1202 12:23:00.415482 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/controller/0.log"
Dec 02 12:23:00 crc kubenswrapper[4721]: I1202 12:23:00.469470 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/frr-metrics/0.log"
Dec 02 12:23:00 crc kubenswrapper[4721]: I1202 12:23:00.532544 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/kube-rbac-proxy/0.log"
Dec 02 12:23:00 crc kubenswrapper[4721]: I1202 12:23:00.612639 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/kube-rbac-proxy-frr/0.log"
Dec 02 12:23:00 crc kubenswrapper[4721]: I1202 12:23:00.663416 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/reloader/0.log"
Dec 02 12:23:00 crc kubenswrapper[4721]: I1202 12:23:00.784161 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-qmwqj_33b0035f-5cb5-458f-a5f9-498a488c358f/frr-k8s-webhook-server/0.log"
Dec 02 12:23:00 crc kubenswrapper[4721]: I1202 12:23:00.912286 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-86c7ccf9f9-b95bs_c7571582-1d03-4d8d-92f0-0ec120163508/manager/0.log"
Dec 02 12:23:01 crc kubenswrapper[4721]: I1202 12:23:01.062390 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5c48b86c94-n454v_1e7d973f-0100-4daa-b4ca-f6f8f9b90a22/webhook-server/0.log"
Dec 02 12:23:01 crc kubenswrapper[4721]: I1202 12:23:01.092142 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-69bnp_9e956286-b38a-40dc-b65c-ab7395592012/frr/0.log"
Dec 02 12:23:01 crc kubenswrapper[4721]: I1202 12:23:01.174798 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-m4dmx_b91afd1f-b9c6-49e2-8c26-93bdf658600e/kube-rbac-proxy/0.log"
Dec 02 12:23:01 crc kubenswrapper[4721]: I1202 12:23:01.316713 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-m4dmx_b91afd1f-b9c6-49e2-8c26-93bdf658600e/speaker/0.log"
scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:23:14 crc kubenswrapper[4721]: E1202 12:23:14.841182 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:23:15 crc kubenswrapper[4721]: I1202 12:23:15.236019 4721 scope.go:117] "RemoveContainer" containerID="3478b9da40dbe812afcf1a71b6fe29bbb14d1c384071998f05bcf6875780e41f" Dec 02 12:23:22 crc kubenswrapper[4721]: I1202 12:23:22.408039 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl_dc649529-91a2-4d31-9c9c-1ad1523d7182/util/0.log" Dec 02 12:23:22 crc kubenswrapper[4721]: I1202 12:23:22.574620 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl_dc649529-91a2-4d31-9c9c-1ad1523d7182/util/0.log" Dec 02 12:23:22 crc kubenswrapper[4721]: I1202 12:23:22.620233 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl_dc649529-91a2-4d31-9c9c-1ad1523d7182/pull/0.log" Dec 02 12:23:22 crc kubenswrapper[4721]: I1202 12:23:22.629533 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl_dc649529-91a2-4d31-9c9c-1ad1523d7182/pull/0.log" Dec 02 12:23:22 crc kubenswrapper[4721]: I1202 12:23:22.757676 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl_dc649529-91a2-4d31-9c9c-1ad1523d7182/extract/0.log" Dec 02 12:23:22 crc kubenswrapper[4721]: I1202 12:23:22.758064 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl_dc649529-91a2-4d31-9c9c-1ad1523d7182/util/0.log" Dec 02 12:23:22 crc kubenswrapper[4721]: I1202 12:23:22.759836 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834b7dl_dc649529-91a2-4d31-9c9c-1ad1523d7182/pull/0.log" Dec 02 12:23:22 crc kubenswrapper[4721]: I1202 12:23:22.899649 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wwzsl_0cd807a4-666e-4d8e-937d-bddbe10b5587/extract-utilities/0.log" Dec 02 12:23:23 crc kubenswrapper[4721]: I1202 12:23:23.038835 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wwzsl_0cd807a4-666e-4d8e-937d-bddbe10b5587/extract-utilities/0.log" Dec 02 12:23:23 crc kubenswrapper[4721]: I1202 12:23:23.049620 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wwzsl_0cd807a4-666e-4d8e-937d-bddbe10b5587/extract-content/0.log" Dec 02 12:23:23 crc kubenswrapper[4721]: I1202 12:23:23.055452 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-wwzsl_0cd807a4-666e-4d8e-937d-bddbe10b5587/extract-content/0.log" Dec 02 12:23:23 crc kubenswrapper[4721]: I1202 12:23:23.192060 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wwzsl_0cd807a4-666e-4d8e-937d-bddbe10b5587/extract-content/0.log" Dec 02 12:23:23 crc kubenswrapper[4721]: I1202 12:23:23.199840 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wwzsl_0cd807a4-666e-4d8e-937d-bddbe10b5587/extract-utilities/0.log" Dec 02 12:23:23 crc kubenswrapper[4721]: I1202 12:23:23.379502 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k95pl_f0efa73b-b118-4069-a168-bb26db513ef7/extract-utilities/0.log" Dec 02 12:23:23 crc kubenswrapper[4721]: I1202 12:23:23.559403 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wwzsl_0cd807a4-666e-4d8e-937d-bddbe10b5587/registry-server/0.log" Dec 02 12:23:23 crc kubenswrapper[4721]: I1202 12:23:23.593469 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k95pl_f0efa73b-b118-4069-a168-bb26db513ef7/extract-content/0.log" Dec 02 12:23:23 crc kubenswrapper[4721]: I1202 12:23:23.594817 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k95pl_f0efa73b-b118-4069-a168-bb26db513ef7/extract-utilities/0.log" Dec 02 12:23:23 crc kubenswrapper[4721]: I1202 12:23:23.600911 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k95pl_f0efa73b-b118-4069-a168-bb26db513ef7/extract-content/0.log" Dec 02 12:23:23 crc kubenswrapper[4721]: I1202 12:23:23.811443 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k95pl_f0efa73b-b118-4069-a168-bb26db513ef7/extract-utilities/0.log" Dec 02 12:23:23 crc kubenswrapper[4721]: I1202 12:23:23.855020 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k95pl_f0efa73b-b118-4069-a168-bb26db513ef7/extract-content/0.log" Dec 02 12:23:24 crc kubenswrapper[4721]: I1202 12:23:24.034902 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-w6v9r_9b2d6ebc-35e8-4617-ab74-d40b1a19d84e/marketplace-operator/0.log" Dec 02 12:23:24 crc kubenswrapper[4721]: I1202 12:23:24.057869 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k95pl_f0efa73b-b118-4069-a168-bb26db513ef7/registry-server/0.log" Dec 02 12:23:24 crc kubenswrapper[4721]: I1202 12:23:24.083058 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-692sp_94263503-850e-479a-9335-8036ea37b30f/extract-utilities/0.log" Dec 02 12:23:24 crc kubenswrapper[4721]: I1202 12:23:24.230315 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-692sp_94263503-850e-479a-9335-8036ea37b30f/extract-utilities/0.log" Dec 02 12:23:24 crc kubenswrapper[4721]: I1202 12:23:24.264825 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-692sp_94263503-850e-479a-9335-8036ea37b30f/extract-content/0.log" Dec 02 12:23:24 crc kubenswrapper[4721]: I1202 12:23:24.276793 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-692sp_94263503-850e-479a-9335-8036ea37b30f/extract-content/0.log" Dec 02 12:23:24 crc kubenswrapper[4721]: I1202 12:23:24.454843 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-692sp_94263503-850e-479a-9335-8036ea37b30f/extract-utilities/0.log" Dec 02 12:23:24 crc kubenswrapper[4721]: I1202 12:23:24.488882 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-692sp_94263503-850e-479a-9335-8036ea37b30f/extract-content/0.log" Dec 02 12:23:24 crc kubenswrapper[4721]: I1202 12:23:24.562867 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-692sp_94263503-850e-479a-9335-8036ea37b30f/registry-server/0.log" Dec 02 12:23:24 crc kubenswrapper[4721]: I1202 12:23:24.654684 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fd78g_e586d5d2-126d-4886-af16-793d937c0eaf/extract-utilities/0.log" Dec 02 12:23:24 crc kubenswrapper[4721]: I1202 12:23:24.818785 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fd78g_e586d5d2-126d-4886-af16-793d937c0eaf/extract-content/0.log" Dec 02 12:23:24 crc kubenswrapper[4721]: I1202 12:23:24.822500 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fd78g_e586d5d2-126d-4886-af16-793d937c0eaf/extract-utilities/0.log" Dec 02 12:23:24 crc kubenswrapper[4721]: I1202 12:23:24.864149 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fd78g_e586d5d2-126d-4886-af16-793d937c0eaf/extract-content/0.log" Dec 02 12:23:24 crc kubenswrapper[4721]: I1202 12:23:24.998073 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fd78g_e586d5d2-126d-4886-af16-793d937c0eaf/extract-utilities/0.log" Dec 02 12:23:25 crc kubenswrapper[4721]: I1202 12:23:25.025194 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fd78g_e586d5d2-126d-4886-af16-793d937c0eaf/extract-content/0.log" Dec 02 12:23:25 crc kubenswrapper[4721]: I1202 12:23:25.359829 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-fd78g_e586d5d2-126d-4886-af16-793d937c0eaf/registry-server/0.log" Dec 02 12:23:29 crc kubenswrapper[4721]: I1202 12:23:29.840657 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:23:29 crc kubenswrapper[4721]: E1202 12:23:29.841823 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:23:41 crc kubenswrapper[4721]: I1202 12:23:41.840518 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:23:41 crc kubenswrapper[4721]: E1202 12:23:41.841590 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:23:52 crc kubenswrapper[4721]: I1202 12:23:52.840412 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:23:52 crc kubenswrapper[4721]: E1202 12:23:52.841196 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mcr6s_openshift-machine-config-operator(141842b6-fd0c-48e6-9795-a627e6f96bd4)\"" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" Dec 02 12:24:05 crc kubenswrapper[4721]: I1202 12:24:05.839838 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" Dec 02 12:24:06 crc kubenswrapper[4721]: I1202 12:24:06.928561 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerStarted","Data":"62151d4a43d83a5d0223d3c4879bfaddf3fd628843a8798a6571d0b223e88ec5"} Dec 02 12:24:28 crc kubenswrapper[4721]: I1202 12:24:28.049780 4721 generic.go:334] "Generic (PLEG): container finished" podID="3f052330-db77-46bd-abce-2a6413a6da3b" containerID="566c7295b9700f8c7d497a5a676812741ffadfcd06613c6c76465a83ccd12f42" exitCode=0 Dec 02 12:24:28 crc kubenswrapper[4721]: I1202 12:24:28.049881 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-btgcl/must-gather-97dx5" event={"ID":"3f052330-db77-46bd-abce-2a6413a6da3b","Type":"ContainerDied","Data":"566c7295b9700f8c7d497a5a676812741ffadfcd06613c6c76465a83ccd12f42"} Dec 02 12:24:28 crc kubenswrapper[4721]: I1202 12:24:28.051487 4721 scope.go:117] "RemoveContainer" containerID="566c7295b9700f8c7d497a5a676812741ffadfcd06613c6c76465a83ccd12f42" Dec 02 12:24:28 crc kubenswrapper[4721]: I1202 12:24:28.453682 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-btgcl_must-gather-97dx5_3f052330-db77-46bd-abce-2a6413a6da3b/gather/0.log" Dec 02 12:24:32 crc kubenswrapper[4721]: I1202 12:24:32.791179 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9n279"] Dec 02 12:24:32 crc kubenswrapper[4721]: I1202 12:24:32.793061 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:32 crc kubenswrapper[4721]: I1202 12:24:32.802316 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9n279"] Dec 02 12:24:32 crc kubenswrapper[4721]: I1202 12:24:32.993061 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5ce0109-b662-4270-9443-49a7164d2e6b-catalog-content\") pod \"redhat-operators-9n279\" (UID: \"f5ce0109-b662-4270-9443-49a7164d2e6b\") " pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:32 crc kubenswrapper[4721]: I1202 12:24:32.993120 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cgcp\" (UniqueName: \"kubernetes.io/projected/f5ce0109-b662-4270-9443-49a7164d2e6b-kube-api-access-7cgcp\") pod \"redhat-operators-9n279\" (UID: \"f5ce0109-b662-4270-9443-49a7164d2e6b\") " pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:32 crc kubenswrapper[4721]: I1202 12:24:32.993157 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5ce0109-b662-4270-9443-49a7164d2e6b-utilities\") pod \"redhat-operators-9n279\" (UID: \"f5ce0109-b662-4270-9443-49a7164d2e6b\") " pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:33 crc kubenswrapper[4721]: I1202 12:24:33.094926 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5ce0109-b662-4270-9443-49a7164d2e6b-catalog-content\") pod \"redhat-operators-9n279\" (UID: \"f5ce0109-b662-4270-9443-49a7164d2e6b\") " pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:33 crc kubenswrapper[4721]: I1202 12:24:33.094997 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cgcp\" (UniqueName: \"kubernetes.io/projected/f5ce0109-b662-4270-9443-49a7164d2e6b-kube-api-access-7cgcp\") pod \"redhat-operators-9n279\" (UID: \"f5ce0109-b662-4270-9443-49a7164d2e6b\") " pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:33 crc kubenswrapper[4721]: I1202 12:24:33.095060 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5ce0109-b662-4270-9443-49a7164d2e6b-utilities\") pod \"redhat-operators-9n279\" (UID: \"f5ce0109-b662-4270-9443-49a7164d2e6b\") " pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:33 crc kubenswrapper[4721]: I1202 12:24:33.095588 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5ce0109-b662-4270-9443-49a7164d2e6b-utilities\") pod \"redhat-operators-9n279\" (UID: \"f5ce0109-b662-4270-9443-49a7164d2e6b\") " pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:33 crc kubenswrapper[4721]: I1202 12:24:33.095905 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5ce0109-b662-4270-9443-49a7164d2e6b-catalog-content\") pod \"redhat-operators-9n279\" (UID: \"f5ce0109-b662-4270-9443-49a7164d2e6b\") " pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:33 crc kubenswrapper[4721]: I1202 12:24:33.116543 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7cgcp\" (UniqueName: \"kubernetes.io/projected/f5ce0109-b662-4270-9443-49a7164d2e6b-kube-api-access-7cgcp\") pod \"redhat-operators-9n279\" (UID: \"f5ce0109-b662-4270-9443-49a7164d2e6b\") " pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:33 crc kubenswrapper[4721]: I1202 12:24:33.160572 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:33 crc kubenswrapper[4721]: I1202 12:24:33.381232 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9n279"] Dec 02 12:24:34 crc kubenswrapper[4721]: I1202 12:24:34.090217 4721 generic.go:334] "Generic (PLEG): container finished" podID="f5ce0109-b662-4270-9443-49a7164d2e6b" containerID="044a8535c861e3e4368796a9b8fb76e4b31d8a2846d58c02a6fe565bbd95dfaa" exitCode=0 Dec 02 12:24:34 crc kubenswrapper[4721]: I1202 12:24:34.090317 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9n279" event={"ID":"f5ce0109-b662-4270-9443-49a7164d2e6b","Type":"ContainerDied","Data":"044a8535c861e3e4368796a9b8fb76e4b31d8a2846d58c02a6fe565bbd95dfaa"} Dec 02 12:24:34 crc kubenswrapper[4721]: I1202 12:24:34.090564 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9n279" event={"ID":"f5ce0109-b662-4270-9443-49a7164d2e6b","Type":"ContainerStarted","Data":"228d0cf4b3d4047d95e903315066c1971fb1da16fbbd3cfa6a7d5edb179d2df0"} Dec 02 12:24:35 crc kubenswrapper[4721]: I1202 12:24:35.098206 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9n279" event={"ID":"f5ce0109-b662-4270-9443-49a7164d2e6b","Type":"ContainerStarted","Data":"cc48dcdb661c723c2763eb0d870cf798cc9b12e24635c987f96b019812cc8704"} Dec 02 12:24:36 crc kubenswrapper[4721]: I1202 12:24:36.109229 4721 generic.go:334] "Generic (PLEG): container finished" podID="f5ce0109-b662-4270-9443-49a7164d2e6b" containerID="cc48dcdb661c723c2763eb0d870cf798cc9b12e24635c987f96b019812cc8704" exitCode=0 Dec 02 12:24:36 crc kubenswrapper[4721]: I1202 12:24:36.109384 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9n279" event={"ID":"f5ce0109-b662-4270-9443-49a7164d2e6b","Type":"ContainerDied","Data":"cc48dcdb661c723c2763eb0d870cf798cc9b12e24635c987f96b019812cc8704"} Dec 02 12:24:37 crc kubenswrapper[4721]: I1202 12:24:37.057761 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-btgcl/must-gather-97dx5"] Dec 02 12:24:37 crc kubenswrapper[4721]: I1202 12:24:37.058222 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-btgcl/must-gather-97dx5" podUID="3f052330-db77-46bd-abce-2a6413a6da3b" containerName="copy" containerID="cri-o://e73e882972b057caf03cadc446b58aa071d61a14f6866f5655ceb1c30b4912a5" gracePeriod=2 Dec 02 12:24:37 crc kubenswrapper[4721]: I1202 12:24:37.061248 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-btgcl/must-gather-97dx5"] Dec 02 12:24:37 crc kubenswrapper[4721]: I1202 12:24:37.116366 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9n279" event={"ID":"f5ce0109-b662-4270-9443-49a7164d2e6b","Type":"ContainerStarted","Data":"48d625d446b5624080973c9259e71c75ae402d140ba452c3bc9b4c2d310de587"} Dec 02 12:24:37 crc kubenswrapper[4721]: I1202 12:24:37.137999 4721 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-marketplace/redhat-operators-9n279" podStartSLOduration=2.519346671 podStartE2EDuration="5.137982191s" podCreationTimestamp="2025-12-02 12:24:32 +0000 UTC" firstStartedPulling="2025-12-02 12:24:34.09205662 +0000 UTC m=+2003.830606969" lastFinishedPulling="2025-12-02 12:24:36.71069214 +0000 UTC m=+2006.449242489" observedRunningTime="2025-12-02 12:24:37.130851221 +0000 UTC m=+2006.869401560" watchObservedRunningTime="2025-12-02 12:24:37.137982191 +0000 UTC m=+2006.876532540" Dec 02 12:24:37 crc kubenswrapper[4721]: I1202 12:24:37.380260 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-btgcl_must-gather-97dx5_3f052330-db77-46bd-abce-2a6413a6da3b/copy/0.log" Dec 02 12:24:37 crc kubenswrapper[4721]: I1202 12:24:37.380698 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-btgcl/must-gather-97dx5" Dec 02 12:24:37 crc kubenswrapper[4721]: I1202 12:24:37.556945 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3f052330-db77-46bd-abce-2a6413a6da3b-must-gather-output\") pod \"3f052330-db77-46bd-abce-2a6413a6da3b\" (UID: \"3f052330-db77-46bd-abce-2a6413a6da3b\") " Dec 02 12:24:37 crc kubenswrapper[4721]: I1202 12:24:37.557069 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfspq\" (UniqueName: \"kubernetes.io/projected/3f052330-db77-46bd-abce-2a6413a6da3b-kube-api-access-lfspq\") pod \"3f052330-db77-46bd-abce-2a6413a6da3b\" (UID: \"3f052330-db77-46bd-abce-2a6413a6da3b\") " Dec 02 12:24:37 crc kubenswrapper[4721]: I1202 12:24:37.562720 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f052330-db77-46bd-abce-2a6413a6da3b-kube-api-access-lfspq" (OuterVolumeSpecName: "kube-api-access-lfspq") pod "3f052330-db77-46bd-abce-2a6413a6da3b" (UID: "3f052330-db77-46bd-abce-2a6413a6da3b"). InnerVolumeSpecName "kube-api-access-lfspq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:24:37 crc kubenswrapper[4721]: I1202 12:24:37.618129 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f052330-db77-46bd-abce-2a6413a6da3b-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "3f052330-db77-46bd-abce-2a6413a6da3b" (UID: "3f052330-db77-46bd-abce-2a6413a6da3b"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:24:37 crc kubenswrapper[4721]: I1202 12:24:37.658302 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfspq\" (UniqueName: \"kubernetes.io/projected/3f052330-db77-46bd-abce-2a6413a6da3b-kube-api-access-lfspq\") on node \"crc\" DevicePath \"\"" Dec 02 12:24:37 crc kubenswrapper[4721]: I1202 12:24:37.658336 4721 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3f052330-db77-46bd-abce-2a6413a6da3b-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 02 12:24:38 crc kubenswrapper[4721]: I1202 12:24:38.123650 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-btgcl_must-gather-97dx5_3f052330-db77-46bd-abce-2a6413a6da3b/copy/0.log" Dec 02 12:24:38 crc kubenswrapper[4721]: I1202 12:24:38.123948 4721 generic.go:334] "Generic (PLEG): container finished" podID="3f052330-db77-46bd-abce-2a6413a6da3b" containerID="e73e882972b057caf03cadc446b58aa071d61a14f6866f5655ceb1c30b4912a5" exitCode=143 Dec 02 12:24:38 crc kubenswrapper[4721]: I1202 12:24:38.124527 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-btgcl/must-gather-97dx5" Dec 02 12:24:38 crc kubenswrapper[4721]: I1202 12:24:38.124539 4721 scope.go:117] "RemoveContainer" containerID="e73e882972b057caf03cadc446b58aa071d61a14f6866f5655ceb1c30b4912a5" Dec 02 12:24:38 crc kubenswrapper[4721]: I1202 12:24:38.140543 4721 scope.go:117] "RemoveContainer" containerID="566c7295b9700f8c7d497a5a676812741ffadfcd06613c6c76465a83ccd12f42" Dec 02 12:24:38 crc kubenswrapper[4721]: I1202 12:24:38.173280 4721 scope.go:117] "RemoveContainer" containerID="e73e882972b057caf03cadc446b58aa071d61a14f6866f5655ceb1c30b4912a5" Dec 02 12:24:38 crc kubenswrapper[4721]: E1202 12:24:38.173656 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e73e882972b057caf03cadc446b58aa071d61a14f6866f5655ceb1c30b4912a5\": container with ID starting with e73e882972b057caf03cadc446b58aa071d61a14f6866f5655ceb1c30b4912a5 not found: ID does not exist" containerID="e73e882972b057caf03cadc446b58aa071d61a14f6866f5655ceb1c30b4912a5" Dec 02 12:24:38 crc kubenswrapper[4721]: I1202 12:24:38.173693 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e73e882972b057caf03cadc446b58aa071d61a14f6866f5655ceb1c30b4912a5"} err="failed to get container status \"e73e882972b057caf03cadc446b58aa071d61a14f6866f5655ceb1c30b4912a5\": rpc error: code = NotFound desc = could not find container \"e73e882972b057caf03cadc446b58aa071d61a14f6866f5655ceb1c30b4912a5\": container with ID starting with e73e882972b057caf03cadc446b58aa071d61a14f6866f5655ceb1c30b4912a5 not found: ID does not exist" Dec 02 12:24:38 crc kubenswrapper[4721]: I1202 12:24:38.173715 4721 scope.go:117] "RemoveContainer" containerID="566c7295b9700f8c7d497a5a676812741ffadfcd06613c6c76465a83ccd12f42" Dec 02 12:24:38 crc kubenswrapper[4721]: E1202 12:24:38.173905 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"566c7295b9700f8c7d497a5a676812741ffadfcd06613c6c76465a83ccd12f42\": container with ID starting with 566c7295b9700f8c7d497a5a676812741ffadfcd06613c6c76465a83ccd12f42 not found: ID does not exist" containerID="566c7295b9700f8c7d497a5a676812741ffadfcd06613c6c76465a83ccd12f42" Dec 02 12:24:38 crc 
kubenswrapper[4721]: I1202 12:24:38.173927 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"566c7295b9700f8c7d497a5a676812741ffadfcd06613c6c76465a83ccd12f42"} err="failed to get container status \"566c7295b9700f8c7d497a5a676812741ffadfcd06613c6c76465a83ccd12f42\": rpc error: code = NotFound desc = could not find container \"566c7295b9700f8c7d497a5a676812741ffadfcd06613c6c76465a83ccd12f42\": container with ID starting with 566c7295b9700f8c7d497a5a676812741ffadfcd06613c6c76465a83ccd12f42 not found: ID does not exist" Dec 02 12:24:38 crc kubenswrapper[4721]: I1202 12:24:38.845629 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f052330-db77-46bd-abce-2a6413a6da3b" path="/var/lib/kubelet/pods/3f052330-db77-46bd-abce-2a6413a6da3b/volumes" Dec 02 12:24:43 crc kubenswrapper[4721]: I1202 12:24:43.161802 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:43 crc kubenswrapper[4721]: I1202 12:24:43.162361 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:43 crc kubenswrapper[4721]: I1202 12:24:43.202269 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:44 crc kubenswrapper[4721]: I1202 12:24:44.208450 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:44 crc kubenswrapper[4721]: I1202 12:24:44.256990 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9n279"] Dec 02 12:24:46 crc kubenswrapper[4721]: I1202 12:24:46.173736 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9n279" podUID="f5ce0109-b662-4270-9443-49a7164d2e6b" containerName="registry-server" containerID="cri-o://48d625d446b5624080973c9259e71c75ae402d140ba452c3bc9b4c2d310de587" gracePeriod=2 Dec 02 12:24:48 crc kubenswrapper[4721]: I1202 12:24:48.186849 4721 generic.go:334] "Generic (PLEG): container finished" podID="f5ce0109-b662-4270-9443-49a7164d2e6b" containerID="48d625d446b5624080973c9259e71c75ae402d140ba452c3bc9b4c2d310de587" exitCode=0 Dec 02 12:24:48 crc kubenswrapper[4721]: I1202 12:24:48.186935 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9n279" event={"ID":"f5ce0109-b662-4270-9443-49a7164d2e6b","Type":"ContainerDied","Data":"48d625d446b5624080973c9259e71c75ae402d140ba452c3bc9b4c2d310de587"} Dec 02 12:24:48 crc kubenswrapper[4721]: I1202 12:24:48.414519 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:48 crc kubenswrapper[4721]: I1202 12:24:48.502124 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cgcp\" (UniqueName: \"kubernetes.io/projected/f5ce0109-b662-4270-9443-49a7164d2e6b-kube-api-access-7cgcp\") pod \"f5ce0109-b662-4270-9443-49a7164d2e6b\" (UID: \"f5ce0109-b662-4270-9443-49a7164d2e6b\") " Dec 02 12:24:48 crc kubenswrapper[4721]: I1202 12:24:48.502280 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5ce0109-b662-4270-9443-49a7164d2e6b-utilities\") pod \"f5ce0109-b662-4270-9443-49a7164d2e6b\" (UID: \"f5ce0109-b662-4270-9443-49a7164d2e6b\") " Dec 02 12:24:48 crc kubenswrapper[4721]: I1202 12:24:48.502330 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5ce0109-b662-4270-9443-49a7164d2e6b-catalog-content\") pod \"f5ce0109-b662-4270-9443-49a7164d2e6b\" (UID: \"f5ce0109-b662-4270-9443-49a7164d2e6b\") " Dec 02 12:24:48 crc kubenswrapper[4721]: I1202 12:24:48.504000 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5ce0109-b662-4270-9443-49a7164d2e6b-utilities" (OuterVolumeSpecName: "utilities") pod "f5ce0109-b662-4270-9443-49a7164d2e6b" (UID: "f5ce0109-b662-4270-9443-49a7164d2e6b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:24:48 crc kubenswrapper[4721]: I1202 12:24:48.515253 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5ce0109-b662-4270-9443-49a7164d2e6b-kube-api-access-7cgcp" (OuterVolumeSpecName: "kube-api-access-7cgcp") pod "f5ce0109-b662-4270-9443-49a7164d2e6b" (UID: "f5ce0109-b662-4270-9443-49a7164d2e6b"). InnerVolumeSpecName "kube-api-access-7cgcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:24:48 crc kubenswrapper[4721]: I1202 12:24:48.604062 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cgcp\" (UniqueName: \"kubernetes.io/projected/f5ce0109-b662-4270-9443-49a7164d2e6b-kube-api-access-7cgcp\") on node \"crc\" DevicePath \"\"" Dec 02 12:24:48 crc kubenswrapper[4721]: I1202 12:24:48.604101 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5ce0109-b662-4270-9443-49a7164d2e6b-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 12:24:48 crc kubenswrapper[4721]: I1202 12:24:48.672836 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5ce0109-b662-4270-9443-49a7164d2e6b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f5ce0109-b662-4270-9443-49a7164d2e6b" (UID: "f5ce0109-b662-4270-9443-49a7164d2e6b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:24:48 crc kubenswrapper[4721]: I1202 12:24:48.705250 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5ce0109-b662-4270-9443-49a7164d2e6b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 12:24:49 crc kubenswrapper[4721]: I1202 12:24:49.194174 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9n279" event={"ID":"f5ce0109-b662-4270-9443-49a7164d2e6b","Type":"ContainerDied","Data":"228d0cf4b3d4047d95e903315066c1971fb1da16fbbd3cfa6a7d5edb179d2df0"} Dec 02 12:24:49 crc kubenswrapper[4721]: I1202 12:24:49.194208 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9n279" Dec 02 12:24:49 crc kubenswrapper[4721]: I1202 12:24:49.194233 4721 scope.go:117] "RemoveContainer" containerID="48d625d446b5624080973c9259e71c75ae402d140ba452c3bc9b4c2d310de587" Dec 02 12:24:49 crc kubenswrapper[4721]: I1202 12:24:49.210310 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9n279"] Dec 02 12:24:49 crc kubenswrapper[4721]: I1202 12:24:49.216782 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9n279"] Dec 02 12:24:49 crc kubenswrapper[4721]: I1202 12:24:49.222806 4721 scope.go:117] "RemoveContainer" containerID="cc48dcdb661c723c2763eb0d870cf798cc9b12e24635c987f96b019812cc8704" Dec 02 12:24:49 crc kubenswrapper[4721]: I1202 12:24:49.238259 4721 scope.go:117] "RemoveContainer" containerID="044a8535c861e3e4368796a9b8fb76e4b31d8a2846d58c02a6fe565bbd95dfaa" Dec 02 12:24:50 crc kubenswrapper[4721]: I1202 12:24:50.845953 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5ce0109-b662-4270-9443-49a7164d2e6b" path="/var/lib/kubelet/pods/f5ce0109-b662-4270-9443-49a7164d2e6b/volumes" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.557144 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n99pl"] Dec 02 12:26:14 crc kubenswrapper[4721]: E1202 12:26:14.557997 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f052330-db77-46bd-abce-2a6413a6da3b" containerName="gather" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.558015 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f052330-db77-46bd-abce-2a6413a6da3b" containerName="gather" Dec 02 12:26:14 crc kubenswrapper[4721]: E1202 12:26:14.558035 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5ce0109-b662-4270-9443-49a7164d2e6b" containerName="extract-utilities" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.558043 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5ce0109-b662-4270-9443-49a7164d2e6b" containerName="extract-utilities" Dec 02 12:26:14 crc kubenswrapper[4721]: E1202 12:26:14.558055 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5ce0109-b662-4270-9443-49a7164d2e6b" containerName="registry-server" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.558063 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5ce0109-b662-4270-9443-49a7164d2e6b" containerName="registry-server" Dec 02 12:26:14 crc kubenswrapper[4721]: E1202 12:26:14.558074 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5ce0109-b662-4270-9443-49a7164d2e6b" containerName="extract-content" Dec 02 12:26:14 crc 
kubenswrapper[4721]: I1202 12:26:14.558084 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5ce0109-b662-4270-9443-49a7164d2e6b" containerName="extract-content" Dec 02 12:26:14 crc kubenswrapper[4721]: E1202 12:26:14.558096 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f052330-db77-46bd-abce-2a6413a6da3b" containerName="copy" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.558105 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f052330-db77-46bd-abce-2a6413a6da3b" containerName="copy" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.558301 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f052330-db77-46bd-abce-2a6413a6da3b" containerName="gather" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.558320 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f052330-db77-46bd-abce-2a6413a6da3b" containerName="copy" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.558330 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5ce0109-b662-4270-9443-49a7164d2e6b" containerName="registry-server" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.559248 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.585974 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n99pl"] Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.646938 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-catalog-content\") pod \"redhat-marketplace-n99pl\" (UID: \"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae\") " pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.646993 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49dnh\" (UniqueName: \"kubernetes.io/projected/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-kube-api-access-49dnh\") pod \"redhat-marketplace-n99pl\" (UID: \"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae\") " pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.647140 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-utilities\") pod \"redhat-marketplace-n99pl\" (UID: \"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae\") " pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.748224 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-utilities\") pod \"redhat-marketplace-n99pl\" (UID: \"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae\") " pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.748315 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-catalog-content\") pod \"redhat-marketplace-n99pl\" (UID: \"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae\") " pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:14 crc 
kubenswrapper[4721]: I1202 12:26:14.748388 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49dnh\" (UniqueName: \"kubernetes.io/projected/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-kube-api-access-49dnh\") pod \"redhat-marketplace-n99pl\" (UID: \"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae\") " pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.748771 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-utilities\") pod \"redhat-marketplace-n99pl\" (UID: \"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae\") " pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.748873 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-catalog-content\") pod \"redhat-marketplace-n99pl\" (UID: \"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae\") " pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.767533 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49dnh\" (UniqueName: \"kubernetes.io/projected/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-kube-api-access-49dnh\") pod \"redhat-marketplace-n99pl\" (UID: \"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae\") " pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:14 crc kubenswrapper[4721]: I1202 12:26:14.880717 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:15 crc kubenswrapper[4721]: I1202 12:26:15.066853 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n99pl"] Dec 02 12:26:15 crc kubenswrapper[4721]: W1202 12:26:15.068933 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podece478b1_e9d4_436f_9564_bf0ef4e1c3ae.slice/crio-c438ffa11b40c93fa2db9d764a8c94a0826c86ccb346eb84038754d9e9955cf2 WatchSource:0}: Error finding container c438ffa11b40c93fa2db9d764a8c94a0826c86ccb346eb84038754d9e9955cf2: Status 404 returned error can't find the container with id c438ffa11b40c93fa2db9d764a8c94a0826c86ccb346eb84038754d9e9955cf2 Dec 02 12:26:15 crc kubenswrapper[4721]: I1202 12:26:15.744112 4721 generic.go:334] "Generic (PLEG): container finished" podID="ece478b1-e9d4-436f-9564-bf0ef4e1c3ae" containerID="7d2d5716139c70dcdc81d8ed381fb3afae9acb9fbd94790e1210ee1abbe63a7b" exitCode=0 Dec 02 12:26:15 crc kubenswrapper[4721]: I1202 12:26:15.744153 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n99pl" event={"ID":"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae","Type":"ContainerDied","Data":"7d2d5716139c70dcdc81d8ed381fb3afae9acb9fbd94790e1210ee1abbe63a7b"} Dec 02 12:26:15 crc kubenswrapper[4721]: I1202 12:26:15.744178 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n99pl" event={"ID":"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae","Type":"ContainerStarted","Data":"c438ffa11b40c93fa2db9d764a8c94a0826c86ccb346eb84038754d9e9955cf2"} Dec 02 12:26:17 crc kubenswrapper[4721]: I1202 12:26:17.762954 4721 generic.go:334] "Generic (PLEG): container finished" podID="ece478b1-e9d4-436f-9564-bf0ef4e1c3ae" 
containerID="26430937554c7976c21437dc406d7ef757c329aee05c296e11126dd648173289" exitCode=0 Dec 02 12:26:17 crc kubenswrapper[4721]: I1202 12:26:17.763065 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n99pl" event={"ID":"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae","Type":"ContainerDied","Data":"26430937554c7976c21437dc406d7ef757c329aee05c296e11126dd648173289"} Dec 02 12:26:18 crc kubenswrapper[4721]: I1202 12:26:18.778177 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n99pl" event={"ID":"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae","Type":"ContainerStarted","Data":"85189d1412409ab25dbd4486db8fa5069face8b062dbe1a6c856914a90be8ee0"} Dec 02 12:26:18 crc kubenswrapper[4721]: I1202 12:26:18.795445 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-n99pl" podStartSLOduration=2.372051062 podStartE2EDuration="4.795421039s" podCreationTimestamp="2025-12-02 12:26:14 +0000 UTC" firstStartedPulling="2025-12-02 12:26:15.746885188 +0000 UTC m=+2105.485435567" lastFinishedPulling="2025-12-02 12:26:18.170255175 +0000 UTC m=+2107.908805544" observedRunningTime="2025-12-02 12:26:18.79125322 +0000 UTC m=+2108.529803599" watchObservedRunningTime="2025-12-02 12:26:18.795421039 +0000 UTC m=+2108.533971408" Dec 02 12:26:24 crc kubenswrapper[4721]: I1202 12:26:24.880852 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:24 crc kubenswrapper[4721]: I1202 12:26:24.881600 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:24 crc kubenswrapper[4721]: I1202 12:26:24.926259 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:25 crc kubenswrapper[4721]: I1202 12:26:25.702720 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 12:26:25 crc kubenswrapper[4721]: I1202 12:26:25.703101 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 12:26:25 crc kubenswrapper[4721]: I1202 12:26:25.873084 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:27 crc kubenswrapper[4721]: I1202 12:26:27.515735 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n99pl"] Dec 02 12:26:28 crc kubenswrapper[4721]: I1202 12:26:28.844883 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-n99pl" podUID="ece478b1-e9d4-436f-9564-bf0ef4e1c3ae" containerName="registry-server" containerID="cri-o://85189d1412409ab25dbd4486db8fa5069face8b062dbe1a6c856914a90be8ee0" gracePeriod=2 Dec 02 12:26:29 crc kubenswrapper[4721]: I1202 12:26:29.823570 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:29 crc kubenswrapper[4721]: I1202 12:26:29.862087 4721 generic.go:334] "Generic (PLEG): container finished" podID="ece478b1-e9d4-436f-9564-bf0ef4e1c3ae" containerID="85189d1412409ab25dbd4486db8fa5069face8b062dbe1a6c856914a90be8ee0" exitCode=0 Dec 02 12:26:29 crc kubenswrapper[4721]: I1202 12:26:29.862207 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n99pl" Dec 02 12:26:29 crc kubenswrapper[4721]: I1202 12:26:29.862201 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n99pl" event={"ID":"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae","Type":"ContainerDied","Data":"85189d1412409ab25dbd4486db8fa5069face8b062dbe1a6c856914a90be8ee0"} Dec 02 12:26:29 crc kubenswrapper[4721]: I1202 12:26:29.862972 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n99pl" event={"ID":"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae","Type":"ContainerDied","Data":"c438ffa11b40c93fa2db9d764a8c94a0826c86ccb346eb84038754d9e9955cf2"} Dec 02 12:26:29 crc kubenswrapper[4721]: I1202 12:26:29.862996 4721 scope.go:117] "RemoveContainer" containerID="85189d1412409ab25dbd4486db8fa5069face8b062dbe1a6c856914a90be8ee0" Dec 02 12:26:29 crc kubenswrapper[4721]: I1202 12:26:29.894379 4721 scope.go:117] "RemoveContainer" containerID="26430937554c7976c21437dc406d7ef757c329aee05c296e11126dd648173289" Dec 02 12:26:29 crc kubenswrapper[4721]: I1202 12:26:29.909911 4721 scope.go:117] "RemoveContainer" containerID="7d2d5716139c70dcdc81d8ed381fb3afae9acb9fbd94790e1210ee1abbe63a7b" Dec 02 12:26:29 crc kubenswrapper[4721]: I1202 12:26:29.952609 4721 scope.go:117] "RemoveContainer" containerID="85189d1412409ab25dbd4486db8fa5069face8b062dbe1a6c856914a90be8ee0" Dec 02 12:26:29 crc kubenswrapper[4721]: E1202 12:26:29.953867 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85189d1412409ab25dbd4486db8fa5069face8b062dbe1a6c856914a90be8ee0\": container with ID starting with 85189d1412409ab25dbd4486db8fa5069face8b062dbe1a6c856914a90be8ee0 not found: ID does not exist" containerID="85189d1412409ab25dbd4486db8fa5069face8b062dbe1a6c856914a90be8ee0" Dec 02 12:26:29 crc kubenswrapper[4721]: I1202 12:26:29.953937 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85189d1412409ab25dbd4486db8fa5069face8b062dbe1a6c856914a90be8ee0"} err="failed to get container status \"85189d1412409ab25dbd4486db8fa5069face8b062dbe1a6c856914a90be8ee0\": rpc error: code = NotFound desc = could not find container \"85189d1412409ab25dbd4486db8fa5069face8b062dbe1a6c856914a90be8ee0\": container with ID starting with 85189d1412409ab25dbd4486db8fa5069face8b062dbe1a6c856914a90be8ee0 not found: ID does not exist" Dec 02 12:26:29 crc kubenswrapper[4721]: I1202 12:26:29.953977 4721 scope.go:117] "RemoveContainer" containerID="26430937554c7976c21437dc406d7ef757c329aee05c296e11126dd648173289" Dec 02 12:26:29 crc kubenswrapper[4721]: E1202 12:26:29.954544 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26430937554c7976c21437dc406d7ef757c329aee05c296e11126dd648173289\": container with ID starting with 26430937554c7976c21437dc406d7ef757c329aee05c296e11126dd648173289 not found: ID does not exist" 
containerID="26430937554c7976c21437dc406d7ef757c329aee05c296e11126dd648173289" Dec 02 12:26:29 crc kubenswrapper[4721]: I1202 12:26:29.954668 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26430937554c7976c21437dc406d7ef757c329aee05c296e11126dd648173289"} err="failed to get container status \"26430937554c7976c21437dc406d7ef757c329aee05c296e11126dd648173289\": rpc error: code = NotFound desc = could not find container \"26430937554c7976c21437dc406d7ef757c329aee05c296e11126dd648173289\": container with ID starting with 26430937554c7976c21437dc406d7ef757c329aee05c296e11126dd648173289 not found: ID does not exist" Dec 02 12:26:29 crc kubenswrapper[4721]: I1202 12:26:29.954755 4721 scope.go:117] "RemoveContainer" containerID="7d2d5716139c70dcdc81d8ed381fb3afae9acb9fbd94790e1210ee1abbe63a7b" Dec 02 12:26:29 crc kubenswrapper[4721]: E1202 12:26:29.955287 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d2d5716139c70dcdc81d8ed381fb3afae9acb9fbd94790e1210ee1abbe63a7b\": container with ID starting with 7d2d5716139c70dcdc81d8ed381fb3afae9acb9fbd94790e1210ee1abbe63a7b not found: ID does not exist" containerID="7d2d5716139c70dcdc81d8ed381fb3afae9acb9fbd94790e1210ee1abbe63a7b" Dec 02 12:26:29 crc kubenswrapper[4721]: I1202 12:26:29.955322 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d2d5716139c70dcdc81d8ed381fb3afae9acb9fbd94790e1210ee1abbe63a7b"} err="failed to get container status \"7d2d5716139c70dcdc81d8ed381fb3afae9acb9fbd94790e1210ee1abbe63a7b\": rpc error: code = NotFound desc = could not find container \"7d2d5716139c70dcdc81d8ed381fb3afae9acb9fbd94790e1210ee1abbe63a7b\": container with ID starting with 7d2d5716139c70dcdc81d8ed381fb3afae9acb9fbd94790e1210ee1abbe63a7b not found: ID does not exist" Dec 02 12:26:30 crc kubenswrapper[4721]: I1202 12:26:30.006704 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-utilities\") pod \"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae\" (UID: \"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae\") " Dec 02 12:26:30 crc kubenswrapper[4721]: I1202 12:26:30.006813 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-catalog-content\") pod \"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae\" (UID: \"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae\") " Dec 02 12:26:30 crc kubenswrapper[4721]: I1202 12:26:30.007133 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49dnh\" (UniqueName: \"kubernetes.io/projected/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-kube-api-access-49dnh\") pod \"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae\" (UID: \"ece478b1-e9d4-436f-9564-bf0ef4e1c3ae\") " Dec 02 12:26:30 crc kubenswrapper[4721]: I1202 12:26:30.008281 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-utilities" (OuterVolumeSpecName: "utilities") pod "ece478b1-e9d4-436f-9564-bf0ef4e1c3ae" (UID: "ece478b1-e9d4-436f-9564-bf0ef4e1c3ae"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:26:30 crc kubenswrapper[4721]: I1202 12:26:30.015258 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-kube-api-access-49dnh" (OuterVolumeSpecName: "kube-api-access-49dnh") pod "ece478b1-e9d4-436f-9564-bf0ef4e1c3ae" (UID: "ece478b1-e9d4-436f-9564-bf0ef4e1c3ae"). InnerVolumeSpecName "kube-api-access-49dnh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 12:26:30 crc kubenswrapper[4721]: I1202 12:26:30.030148 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ece478b1-e9d4-436f-9564-bf0ef4e1c3ae" (UID: "ece478b1-e9d4-436f-9564-bf0ef4e1c3ae"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 12:26:30 crc kubenswrapper[4721]: I1202 12:26:30.108572 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49dnh\" (UniqueName: \"kubernetes.io/projected/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-kube-api-access-49dnh\") on node \"crc\" DevicePath \"\"" Dec 02 12:26:30 crc kubenswrapper[4721]: I1202 12:26:30.108975 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 12:26:30 crc kubenswrapper[4721]: I1202 12:26:30.109088 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 12:26:30 crc kubenswrapper[4721]: I1202 12:26:30.202062 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n99pl"] Dec 02 12:26:30 crc kubenswrapper[4721]: I1202 12:26:30.207643 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-n99pl"] Dec 02 12:26:30 crc kubenswrapper[4721]: I1202 12:26:30.844854 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ece478b1-e9d4-436f-9564-bf0ef4e1c3ae" path="/var/lib/kubelet/pods/ece478b1-e9d4-436f-9564-bf0ef4e1c3ae/volumes" Dec 02 12:26:55 crc kubenswrapper[4721]: I1202 12:26:55.703155 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 12:26:55 crc kubenswrapper[4721]: I1202 12:26:55.703926 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 12:27:25 crc kubenswrapper[4721]: I1202 12:27:25.703536 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mcr6s container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 12:27:25 crc kubenswrapper[4721]: I1202 12:27:25.704265 4721 prober.go:107] 
"Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 12:27:25 crc kubenswrapper[4721]: I1202 12:27:25.704326 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" Dec 02 12:27:25 crc kubenswrapper[4721]: I1202 12:27:25.705280 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"62151d4a43d83a5d0223d3c4879bfaddf3fd628843a8798a6571d0b223e88ec5"} pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 12:27:25 crc kubenswrapper[4721]: I1202 12:27:25.705447 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" podUID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerName="machine-config-daemon" containerID="cri-o://62151d4a43d83a5d0223d3c4879bfaddf3fd628843a8798a6571d0b223e88ec5" gracePeriod=600 Dec 02 12:27:26 crc kubenswrapper[4721]: I1202 12:27:26.183700 4721 generic.go:334] "Generic (PLEG): container finished" podID="141842b6-fd0c-48e6-9795-a627e6f96bd4" containerID="62151d4a43d83a5d0223d3c4879bfaddf3fd628843a8798a6571d0b223e88ec5" exitCode=0 Dec 02 12:27:26 crc kubenswrapper[4721]: I1202 12:27:26.183992 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerDied","Data":"62151d4a43d83a5d0223d3c4879bfaddf3fd628843a8798a6571d0b223e88ec5"} Dec 02 12:27:26 crc kubenswrapper[4721]: I1202 12:27:26.184024 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mcr6s" event={"ID":"141842b6-fd0c-48e6-9795-a627e6f96bd4","Type":"ContainerStarted","Data":"5f51966867d2253bb7792f2231ccf8f3a3eae257fe06bd34b48ff9dc133ca442"} Dec 02 12:27:26 crc kubenswrapper[4721]: I1202 12:27:26.184045 4721 scope.go:117] "RemoveContainer" containerID="d47f131065d3bb4b15a73c118095a78d7e663ac5677f8177bd44ff85506f53b0" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515113555607024455 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015113555607017372 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015113550775016516 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015113550776015467 5ustar corecore